diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 000000000..eae65c93f --- /dev/null +++ b/.coveragerc @@ -0,0 +1,18 @@ +[run] +omit = + */test_* + **/conftest.py +data_file = /tmp/hyperion.coverage + +[report] +exclude_also = + if TYPE_CHECKING: + def __repr__ + raise NotImplementedError + @(abc\.)?abstractmethod + +[paths] +# Tests are run from installed location, map back to the src directory +source = + src + **/site-packages/ diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 56186e0fe..3b25b0e53 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -3,7 +3,7 @@ # The devcontainer should use the build target and run as root with podman # or docker with user namespaces. # -FROM python:3.11 as build +FROM python:3.11 AS build ARG PIP_OPTIONS @@ -24,7 +24,7 @@ WORKDIR /context # install python package into /venv RUN pip install ${PIP_OPTIONS} -FROM python:3.11-slim as runtime +FROM python:3.11-slim AS runtime # Add apt-get system dependecies for runtime here if needed diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 99e30fbc1..8c533ca7a 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -1,10 +1,40 @@ -# Contribute to the project +# How to contribute to mx-bluesky -Contributions and issues are most welcome! All issues and pull requests are -handled through [GitHub](https://github.com/DiamondLightSource/mx-bluesky/issues). Also, please check for any existing issues before -filing a new one. If you have a great idea but it involves big changes, please -file a ticket before making a pull request! We want to make sure you don't spend -your time coding something that might not fit the scope of the project. +We should prioritise working, agile code over strict standards, particularly for contributors who are not full-time developers. The standards below are thus guidelines that developers should try to follow as much as possible (apart from when something is specified in bold, which is required). + +Contributions and issues are most welcome! All issues and pull requests are handled through [GitHub](https://github.com/DiamondLightSource/mx-bluesky/issues). Also, please check for any existing issues before filing a new one. If you have a great idea but it involves big changes, please file a ticket before making a pull request! We want to make sure you don't spend your time coding something that might not fit the scope of the project. + +## General Workflow + +1. An issue is created for the work. This issue should describe in as much detail as possible the work that needs to be done. Anyone is free to make a ticket and/or comment on one. +2. If a developer is going to do the work, they assign themselves to the issue. +3. The developer makes a new branch with the format `issue_short_description` e.g. `122_create_a_contributing_file`. (External developers are also welcome to make forks) +4. The developer does the work on this branch, adding their work in small commits. Commit messages should be informative and prefixed with the issue number e.g. `(#122) Added contributing file`. +5. The developer submits a PR for the work. The pull request description should start with `Fixes #issue_num` e.g. `Fixes #122`; this will ensure the issue is automatically closed when the PR is merged. The developer should also add some background on how the reviewer might test the change. +6. If the developer has a particular person in mind to review the work, they should assign that person to the PR as a reviewer. +7. 
The reviewer and developer go back and forth on the code until the reviewer approves it. (See "Reviewing Work" below) +8. Once the work is approved, the original developer merges it. + +**Work should not be done without an associated ticket describing what the work is** + +## Reviewing Work + +**Work must be reviewed by another developer before it can be merged**. Remember that work is reviewed for a number of reasons: + +- To maintain quality and avoid errors +- To help people learn + +It is not a judgement on someone's technical abilities, so be kind. + +It is suggested that the reviewer prefixes comments with something like the following: + +- **must**: A change that must be made before merging e.g. it will break something if not made +- **should/sugg**: A change that should be made e.g. it definitely improves code quality but does not necessarily break anything +- **nit**: A minor suggestion that the reviewer would like to see but is happy to leave as is e.g. renaming a variable + +Developers are welcome to ignore **nit** comments if they wish and can choose not to act on **should** comments, but they must give a reason as to why they disagree with the change. + +For minor changes to code, reviewers are welcome to make the changes themselves, but in this case the original developer should then recheck what the reviewer has done. ## Issue or Discussion? diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md index 8200afe5c..7d647b379 100644 --- a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -1,8 +1,13 @@ Fixes #ISSUE +Link to dodal PR (if required): #XXX +(remember to update `setup.cfg` with the dodal commit tag if you need it for tests to pass!) + ### Instructions to reviewer on how to test: + 1. Do thing x 2. Confirm thing y happens ### Checks for reviewer + - [ ] Would the PR title make sense to a user on a set of release notes diff --git a/.github/actions/verify-nexus/Dockerfile b/.github/actions/verify-nexus/Dockerfile new file mode 100644 index 000000000..cd58062e1 --- /dev/null +++ b/.github/actions/verify-nexus/Dockerfile @@ -0,0 +1,3 @@ +FROM ghcr.io/githubgphl/imginfo:main +COPY entrypoint.sh /entrypoint.sh +ENTRYPOINT ["/entrypoint.sh"] diff --git a/.github/actions/verify-nexus/action.yml b/.github/actions/verify-nexus/action.yml new file mode 100644 index 000000000..0a9e66765 --- /dev/null +++ b/.github/actions/verify-nexus/action.yml @@ -0,0 +1,18 @@ +name: "Verify nexus" +description: "Verify nexus files against imginfo" +inputs: + filename: + description: "nexus file to verify" + required: true +outputs: + imginfo_stdout: + description: "imginfo output" + imginfo_stderr: + description: "imginfo error output" + imginfo_exit_code: + description: "imginfo exit code" +runs: + using: "docker" + image: "Dockerfile" + args: + - ${{ inputs.filename }} diff --git a/.github/actions/verify-nexus/entrypoint.sh b/.github/actions/verify-nexus/entrypoint.sh new file mode 100755 index 000000000..30883f279 --- /dev/null +++ b/.github/actions/verify-nexus/entrypoint.sh @@ -0,0 +1,25 @@ +#!/bin/sh -l +echo "Contents of /github/workspace:" +ls /github/workspace +echo "current dir: $(pwd)" +echo "Running imginfo on $1" +/imginfo/imginfo $1 >> imginfo_out_file 2>> imginfo_err_file +{ echo "imginfo_stdout<<EOF"; cat imginfo_out_file; echo "EOF"; } >> "$GITHUB_OUTPUT" +{ echo "imginfo_stderr<<EOF"; cat imginfo_err_file; echo "EOF"; } >> "$GITHUB_OUTPUT" +echo "imginfo_exit_code=$?" 
>> "$GITHUB_OUTPUT" +echo "------------- IMGINFO STDOUT -------------" +cat imginfo_out_file +echo "------------- IMGINFO STDERR -------------" +cat imginfo_err_file +echo "------------------------------------------" +if [ -s imginfo_err_file ]; then + echo "ERRORS IN IMGINFO PROCESSING!" + exit 1 +fi +echo "VALIDATED SUCCESSFULLY!" diff --git a/.github/workflows/_test.yml b/.github/workflows/_test.yml index f652d4145..3d200cdff 100644 --- a/.github/workflows/_test.yml +++ b/.github/workflows/_test.yml @@ -51,7 +51,7 @@ jobs: pip-install: ".[dev]" - name: Run tests - run: tox -e tests + run: tox -e tests -- -m "not dlstbx" - name: Upload coverage to Codecov uses: codecov/codecov-action@v4 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fb9ef11f3..4906989d9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: strategy: matrix: runs-on: ["ubuntu-latest"] # can add windows-latest, macos-latest - python-version: ["3.10", "3.11"] # , "3.12"] # add 3.12 when p4p #145 is fixed + python-version: ["3.11"] # , "3.12"] # add 3.12 when p4p #145 is fixed include: # Include one that runs in the dev environment - runs-on: "ubuntu-latest" @@ -52,12 +52,12 @@ jobs: if: needs.check.outputs.branch-pr == '' uses: ./.github/workflows/_dist.yml - pypi: - if: github.ref_type == 'tag' - needs: dist - uses: ./.github/workflows/_pypi.yml - permissions: - id-token: write + # pypi: + # if: github.ref_type == 'tag' + # needs: dist + # uses: ./.github/workflows/_pypi.yml + # permissions: + # id-token: write release: if: github.ref_type == 'tag' diff --git a/.github/workflows/pin_versions.py b/.github/workflows/pin_versions.py new file mode 100755 index 000000000..26a0fba1c --- /dev/null +++ b/.github/workflows/pin_versions.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 +import argparse +import locale +import os +import re +import subprocess +from functools import partial +from sys import stderr, stdout + +SETUP_CFG_PATTERN = re.compile("(.*?\\S)\\s*(@(.*))?$") +SETUP_UNPINNED_PATTERN = re.compile("(.*?\\S)\\s*([<>=]+(.*))?$") +PIP = "pip" + + +def rename_original(suffix): + os.rename("setup.cfg", "setup.cfg" + suffix) + + +def normalize(package_name: str): + return re.sub(r"[-_.]+", "-", package_name).lower() + + +def fetch_pin_versions() -> dict[str, str]: + process = run_pip_freeze() + if process.returncode == 0: + output = process.stdout + lines = output.split("\n") + pin_versions = {} + for line in lines: + kvpair = line.split("==") + if len(kvpair) != 2: + stderr.write(f"Unable to parse {line} - ignored\n") + else: + pin_versions[normalize(kvpair[0]).strip()] = kvpair[1].strip() + return pin_versions + else: + stderr.write(f"pip freeze failed with error code {process.returncode}\n") + stderr.write(process.stderr) + exit(1) + + +def run_pip_freeze(): + process = subprocess.run( + [PIP, "freeze"], capture_output=True, encoding=locale.getpreferredencoding() + ) + return process + + +def process_setup_cfg(input_fname, output_fname, dependency_processor): + with open(input_fname) as input_file: + with open(output_fname, "w") as output_file: + process_files(input_file, output_file, dependency_processor) + + +def process_files(input_file, output_file, dependency_processor): + while line := input_file.readline(): + output_file.write(line) + if line.startswith("install_requires"): + break + while (line := input_file.readline()) and not line.startswith("["): + if line.isspace(): + output_file.write(line) + else: + dependency_processor(line, output_file) + 
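# The loop above stops at the next section header (a line beginning "[") or at EOF; write that terminating line out before copying the rest of the file unchanged. +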
output_file.write(line) + while line := input_file.readline(): + output_file.write(line) + + +def strip_comment(line: str): + split = line.rstrip("\n").split("#", 1) + return split[0], (split[1] if len(split) > 1 else None) + + +def write_with_comment(comment, text, output_file): + output_file.write(text) + if comment: + output_file.write(" #" + comment) + output_file.write("\n") + + +def update_setup_cfg_line(version_map: dict[str, str], line, output_file): + stripped_line, comment = strip_comment(line) + if match := SETUP_UNPINNED_PATTERN.match(stripped_line): + normalized_name = normalize(match[1].strip()) + if normalized_name not in version_map: + stderr.write( + f"Unable to find {normalized_name} in installed python packages\n" + ) + exit(1) + + write_with_comment( + comment, + f" {normalized_name} == {version_map[normalized_name]}", + output_file, + ) + else: + output_file.write(line) + + +def write_commit_message(pinned_versions: dict[str, str]): + message = f"Pin dependencies prior to release. Dodal {pinned_versions['dls-dodal']}, nexgen {pinned_versions['nexgen']}" + stdout.write(message) + + +def unpin_versions(line, output_file): + stripped_line, comment = strip_comment(line) + if match := SETUP_CFG_PATTERN.match(stripped_line): + if match[3] and match[3].strip().startswith("git+"): + write_with_comment(comment, match[1], output_file) + return + + output_file.write(line) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Pin dependency versions in setup.cfg") + parser.add_argument( + "--unpin", + help="remove pinned hashes from setup.cfg prior to pip installing latest", + action="store_true", + ) + args = parser.parse_args() + + if args.unpin: + rename_original(".orig") + process_setup_cfg("setup.cfg.orig", "setup.cfg", unpin_versions) + else: + rename_original(".unpinned") + installed_versions = fetch_pin_versions() + process_setup_cfg( + "setup.cfg.unpinned", + "setup.cfg", + partial(update_setup_cfg_line, installed_versions), + ) + write_commit_message(installed_versions) diff --git a/.github/workflows/pre_release_workflow.yml b/.github/workflows/pre_release_workflow.yml new file mode 100644 index 000000000..28a3d0d53 --- /dev/null +++ b/.github/workflows/pre_release_workflow.yml @@ -0,0 +1,48 @@ +name: pre_release_workflow +on: + workflow_dispatch: + inputs: + tagName: + description: 'Tag to create' + required: true + type: string + default: 'vX.Y.Z' +jobs: + pin_dependency_versions: + runs-on: ubuntu-latest + env: + GH_TOKEN: ${{secrets.GITHUB_TOKEN}} + steps: + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + architecture: x64 + - name: checkout + uses: actions/checkout@v4 + - name: Reset pinned hash versions + run: .github/workflows/pin_versions.py --unpin + - name: Install with latest dependencies + run: pip install -e .[dev] + - id: pin_versions + name: Pin Versions + run: | + MSG=$(.github/workflows/pin_versions.py) + echo "COMMIT_MESSAGE=\"$MSG\"" >> "$GITHUB_OUTPUT" + - name: Add setup.cfg + run: | + git add setup.cfg + - name: Commit changes + run: | + git config --global user.name "${{ github.actor }}" + git config --global user.email "${{ github.actor }}@users.noreply.github.com" + git commit -m ${{steps.pin_versions.outputs.COMMIT_MESSAGE}} + git tag ${{inputs.tagName}} + git push origin ${{inputs.tagName}} + - name: Create Release + run: gh release create --generate-notes --draft ${{inputs.tagName}} + - name: Edit Release Notes + run: | + echo -e "${{ steps.pin_versions.outputs.COMMIT_MESSAGE 
}}\n\n" > "$RUNNER_TEMP/relnotes.txt" + gh release view ${{ inputs.tagName }} | sed '0,/^--$/d' >> "$RUNNER_TEMP/relnotes.txt" + gh release edit ${{ inputs.tagName }} --notes-file "$RUNNER_TEMP/relnotes.txt" diff --git a/.gitignore b/.gitignore index dc325da45..f42582b2c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] +*$py.class # C extensions *.so @@ -8,6 +9,12 @@ __pycache__/ # Distribution / packaging .Python env/ +# Output +*.png +!docs/*.png + +# Distribution / packaging +.Python build/ develop-eggs/ dist/ @@ -19,10 +26,14 @@ lib64/ parts/ sdist/ var/ +**/_version.py +wheels/ +pip-wheel-metadata/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg -**/_version.py +MANIFEST # PyInstaller # Usually these files are written by a python script from a template @@ -37,14 +48,18 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml cov.xml +*.cover +*.py,cover +.hypothesis/ .pytest_cache/ -.mypy_cache/ +cov.xml # Translations *.mo @@ -52,6 +67,16 @@ cov.xml # Django stuff: *.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy # Sphinx documentation docs/_build/ @@ -59,9 +84,66 @@ docs/_build/ # PyBuilder target/ -# likely venv names -.venv* -venv* +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ +.venv*/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# gedit backup files +*~ + +# Log files +**/tmp # further build artifacts lockfiles/ @@ -87,3 +169,5 @@ edm_serial/ **/parameters/fixed_target/cs/fiducial_*.txt **/parameters/fixed_target/*.json **/parameters/fixed_target/*/*.map +# idea project files +.idea/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f652dd2b..64fb0b4f8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,9 +3,13 @@ repos: rev: v4.5.0 hooks: - id: check-added-large-files + args: ["--maxkb=500"] - id: check-yaml + args: ["--allow-multiple-documents"] - id: check-merge-conflict - id: end-of-file-fixer + - id: no-commit-to-branch + name: "Don't commit to 'main'" - repo: local hooks: @@ -29,5 +33,5 @@ repos: hooks: - id: mypy files: 'src/.*\.py$' - additional_dependencies: [types-requests, types-redis] + additional_dependencies: [types-requests, types-redis, pydantic] args: ["--ignore-missing-imports", "--no-strict-optional"] diff --git a/.vscode/extensions.json b/.vscode/extensions.json index 933c580cd..a1227b348 100644 --- a/.vscode/extensions.json +++ b/.vscode/extensions.json @@ -1,5 +1,10 @@ { "recommendations": [ "ms-vscode-remote.remote-containers", + "ms-python.python", + "tamasfe.even-better-toml", + "redhat.vscode-yaml", + "ryanluker.vscode-coverage-gutters", + "charliermarsh.Ruff" ] } diff --git a/Dockerfile b/Dockerfile index 3d17f209c..823080717 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ # The devcontainer should use the developer target and run as root with podman # or docker with user namespaces. ARG PYTHON_VERSION=3.11 -FROM python:${PYTHON_VERSION} as developer +FROM python:${PYTHON_VERSION} AS developer # Add any system dependencies for the developer/build environment here RUN apt-get update && apt-get install -y --no-install-recommends \ @@ -13,13 +13,13 @@ RUN python -m venv /venv ENV PATH=/venv/bin:$PATH # The build stage installs the context into the venv -FROM developer as build +FROM developer AS build COPY . /context WORKDIR /context RUN touch dev-requirements.txt && pip install -c dev-requirements.txt . 
# The runtime stage copies the built venv into a slim runtime container -FROM python:${PYTHON_VERSION}-slim as runtime +FROM python:${PYTHON_VERSION}-slim AS runtime # Add apt-get system dependecies for runtime here if needed COPY --from=build /venv/ /venv/ ENV PATH=/venv/bin:$PATH diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 000000000..23b7e58b7 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,9 @@ +coverage: + status: + project: + default: + target: 85% # the required coverage value + threshold: 1% # the leniency in hitting the target + +ignore: + - "**/conftest.py" diff --git a/conftest.py b/conftest.py new file mode 100644 index 000000000..728b0969e --- /dev/null +++ b/conftest.py @@ -0,0 +1,33 @@ +from collections.abc import Iterator +from os import environ, getenv +from unittest.mock import patch + +import pytest + +environ["HYPERION_TEST_MODE"] = "true" + +print("Adjusting S03 EPICS environment ...") +s03_epics_server_port = getenv("S03_EPICS_CA_SERVER_PORT") +s03_epics_repeater_port = getenv("S03_EPICS_CA_REPEATER_PORT") +if s03_epics_server_port: + environ["EPICS_CA_SERVER_PORT"] = s03_epics_server_port + print(f"[EPICS_CA_SERVER_PORT] = {s03_epics_server_port}") +if s03_epics_repeater_port: + environ["EPICS_CA_REPEATER_PORT"] = s03_epics_repeater_port + print(f"[EPICS_CA_REPEATER_PORT] = {s03_epics_repeater_port}") + + +def pytest_addoption(parser): + parser.addoption( + "--logging", + action="store_true", + default=False, + help="Log during all tests (not just those that are testing logging logic)", + ) + + +@pytest.fixture(scope="session", autouse=True) +def default_session_fixture() -> Iterator[None]: + print("Patching bluesky 0MQ Publisher in __main__ for the whole session") + with patch("mx_bluesky.hyperion.__main__.Publisher"): + yield diff --git a/docs/conf.py b/docs/conf.py index c1ea89997..a9713ed0b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -44,6 +44,24 @@ "sphinx_copybutton", # For the card element "sphinx_design", + # For markdown + "myst_parser", +] + +myst_enable_extensions = [ + "amsmath", + "attrs_inline", + "colon_fence", + "deflist", + "dollarmath", + "fieldlist", + "html_admonition", + "html_image", + "replacements", + "smartquotes", + "strikethrough", + "substitution", + "tasklist", ] # If true, Sphinx will warn about all references where the target cannot @@ -83,7 +101,7 @@ default_role = "any" # The suffix of source filenames. -source_suffix = [".rst", ".md"] +source_suffix = {".rst": "restructuredtext", ".md": "markdown"} # The master toctree document. master_doc = "index" diff --git a/docs/developer/general/explanations/decisions/0002-repository-structure.rst b/docs/developer/general/explanations/decisions/0002-repository-structure.rst new file mode 100644 index 000000000..d13f2bbb0 --- /dev/null +++ b/docs/developer/general/explanations/decisions/0002-repository-structure.rst @@ -0,0 +1,46 @@ +2. mx-bluesky repository structure +================================== + +Date: 2024-08-21 + +Status +------ + +Accepted + +Context +------- + +Initially, we wanted to separate "application code" such as hyperion from "library code" generic over MX beamlines belonging in ``mx-bluesky``. +However, as these were developed together and also together with ``dodal``, dependency management became impossible. + +Decision +-------- + +We will stick to a monorepo repository structure as follows: + +.. 
code-block:: text + + mx_bluesky/ + ├-src/ + | └-mx_bluesky/ + | ├-common/ # Plan stubs, utilities, and other common code + | ├-beamlines/ + | | ├-i03/ + | | ├-i04/ + | | └-i24/ + | | └-serial/ # Plans for one beamline go in the respective module + | └-hyperion/ # Plans for more than one beamline go in the top level + ├-tests/ + | ├-unit_tests/ # Tests are separated into "unit_tests" and "system_tests" + | | └-{mirror of mx_bluesky structure}/ # Where the former refers to tests which can be run without + | ├-system_tests/ # access to external services, and the latter might need + | | └-{mirror of mx_bluesky structure}/ # to talk to ISPyB, the DLS filesystem, etc. + | └-test_data/ + └-docs/ + ├-developer/ + └-user/ + +To preserve some of the benefits we would have had from separate repositories, code from beamline or "application" +modules (e.g. ``mx_bluesky.i24.serial`` or ``mx_bluesky.hyperion``) may import from ``mx_bluesky.common`` but not +the other way around - this should be enforced with a check in CI. diff --git a/docs/developer/general/explanations/decisions/0003-python-version-support.rst b/docs/developer/general/explanations/decisions/0003-python-version-support.rst new file mode 100644 index 000000000..c281c94cb --- /dev/null +++ b/docs/developer/general/explanations/decisions/0003-python-version-support.rst @@ -0,0 +1,29 @@ +1. Python version support +========================= + +Date: 2024-08-22 + +Status +------ + +Accepted + +Context +------- + +We need to decide which versions of python we will support. We want to be able to make use of new python features, +and reduce the number of different supported versions, but we also want scientists in a variety of environments to +be able to use the code in this repository. At the time of writing this, we merged https://github.com/DiamondLightSource/hyperion +into this repository. It only supported python 3.11, while ``mx-bluesky`` at this time supported 3.10 and 3.11. + +Decision +-------- + +We will support no older python version than described in https://numpy.org/neps/nep-0029-deprecation_policy.html +We may if appropriate choose to drop python versions before they are dropped by that schedule. Instead +of modifying ``hyperion`` to work with python 3.10, we are dropping support for it at the time of writing this. + +Consequences +------------ + +We must always support at least the newest major python version, and most likely several versions behind it, but no version older than 42 months. diff --git a/docs/developer/general/how-to/contribute.rst b/docs/developer/general/how-to/contribute.rst index 737cd4824..56966ffe3 100644 --- a/docs/developer/general/how-to/contribute.rst +++ b/docs/developer/general/how-to/contribute.rst @@ -2,3 +2,4 @@ Contributing ------------ .. include:: ../../../../.github/CONTRIBUTING.md + :parser: markdown diff --git a/docs/developer/hyperion/index.rst b/docs/developer/hyperion/index.rst new file mode 100644 index 000000000..3f934ad59 --- /dev/null +++ b/docs/developer/hyperion/index.rst @@ -0,0 +1,20 @@ +General MX-Bluesky Developer Guide +================================== + +Documentation is split into four categories, and each is also accessible from links in the side-bar. + +.. grid:: 2 + :gutter: 2 + + .. grid-item-card:: :material-regular:`description;3em` + + .. toctree:: + :caption: Reference + :maxdepth: 1 + + reference/param-hierarchy + reference/readme + + +++ + + Technical reference material on standards in use. 
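As a brief aside on the repository-structure decision above: the ADR states that beamline and application modules may import from ``mx_bluesky.common`` but not the other way around, and that this should be enforced with a check in CI. A minimal sketch of what such a check could look like is given below; the script itself, the assumed ``src/mx_bluesky/common`` layout and the forbidden-package list are illustrative assumptions rather than an existing tool in this repository.

```python
"""Illustrative CI check: fail if mx_bluesky.common imports beamline/application code."""
import ast
import sys
from pathlib import Path

# Direction forbidden by the ADR: common code must not depend on these packages.
FORBIDDEN_PREFIXES = ("mx_bluesky.beamlines", "mx_bluesky.hyperion")
COMMON_ROOT = Path("src/mx_bluesky/common")  # assumed layout, as in the ADR above


def forbidden_imports(path: Path) -> list[str]:
    """Return any absolute imports in this file that point at forbidden packages."""
    tree = ast.parse(path.read_text(), filename=str(path))
    found = []
    for node in ast.walk(tree):
        if isinstance(node, ast.Import):
            names = [alias.name for alias in node.names]
        elif isinstance(node, ast.ImportFrom) and node.module:
            names = [node.module]
        else:
            continue
        found += [name for name in names if name.startswith(FORBIDDEN_PREFIXES)]
    return found


if __name__ == "__main__":
    failures = {
        str(path): bad
        for path in sorted(COMMON_ROOT.rglob("*.py"))
        if (bad := forbidden_imports(path))
    }
    for filename, bad in failures.items():
        print(f"{filename} imports from non-common module(s): {', '.join(bad)}")
    sys.exit(1 if failures else 0)
```

In practice a dedicated tool such as import-linter could express the same contract declaratively; the sketch above only illustrates the direction of the rule.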
diff --git a/docs/developer/hyperion/reference/param-hierarchy.rst b/docs/developer/hyperion/reference/param-hierarchy.rst new file mode 100644 index 000000000..9e59cbc5f --- /dev/null +++ b/docs/developer/hyperion/reference/param-hierarchy.rst @@ -0,0 +1,4 @@ +Hyperion Parameter class hierarchy +================================== + +TODO: automate including the param hierarchy here diff --git a/docs/developer/hyperion/reference/param_hierarchy.puml b/docs/developer/hyperion/reference/param_hierarchy.puml new file mode 100644 index 000000000..64e3f9ef3 --- /dev/null +++ b/docs/developer/hyperion/reference/param_hierarchy.puml @@ -0,0 +1,78 @@ +@startuml hyperion_parameter_model +'https://plantuml.com/class-diagram +title Hyperion Parameter Model + +abstract class BaseModel + +package Mixins { + class WithSample + class WithScan + class WithOavCentring + class WithSnapshot + class OptionalXyzStarts + class XyzStarts + class OptionalGonioAngleStarts + class SplitScan + class RotationScanPerSweep + class RotationExperiment +} + +package Experiments { + class DiffractionExperiment + class DiffractionExperimentWithSample + class GridCommon + class GridScanWithEdgeDetect + class PinTipCentreThenXrayCentre + class RotationScan + class MultiRotationScan + class RobotLoadThenCentre + class SpecifiedGridScan + class ThreeDGridScan +} + +class HyperionParameters +note bottom: Base class for all experiment parameter models + +class TemporaryIspybExtras +note bottom: To be removed + + +BaseModel <|-- HyperionParameters +BaseModel <|-- SplitScan +BaseModel <|-- OptionalGonioAngleStarts +BaseModel <|-- OptionalXyzStarts +BaseModel <|-- TemporaryIspybExtras +BaseModel <|-- WithOavCentring +BaseModel <|-- WithSnapshot +BaseModel <|-- WithSample +BaseModel <|-- WithScan +BaseModel <|-- XyzStarts + +RotationScan *-- TemporaryIspybExtras +MultiRotationScan *-- TemporaryIspybExtras +OptionalGonioAngleStarts <|-- RotationScanPerSweep +OptionalXyzStarts <|-- RotationScanPerSweep +DiffractionExperimentWithSample <|-- RotationExperiment +HyperionParameters <|-- DiffractionExperiment +WithSnapshot <|-- DiffractionExperiment +DiffractionExperiment <|-- DiffractionExperimentWithSample +WithSample <|-- DiffractionExperimentWithSample +DiffractionExperimentWithSample <|-- GridCommon +GridCommon <|-- GridScanWithEdgeDetect +GridCommon <|-- PinTipCentreThenXrayCentre +GridCommon <|-- RobotLoadThenCentre +GridCommon <|-- SpecifiedGridScan +WithScan <|-- SpecifiedGridScan +SpecifiedGridScan <|-- ThreeDGridScan +SplitScan <|-- ThreeDGridScan +WithOavCentring <|-- GridCommon +WithScan <|-- RotationScan +RotationScanPerSweep <|-- RotationScan +MultiRotationScan *-- RotationScanPerSweep +RotationExperiment <|-- RotationScan +RotationExperiment <|-- MultiRotationScan +SplitScan <|-- MultiRotationScan +XyzStarts <|-- SpecifiedGridScan +OptionalGonioAngleStarts <|-- GridCommon +OptionalGonioAngleStarts <|-- RotationScan +@enduml diff --git a/docs/developer/hyperion/reference/readme.md b/docs/developer/hyperion/reference/readme.md new file mode 100644 index 000000000..d1e124df1 --- /dev/null +++ b/docs/developer/hyperion/reference/readme.md @@ -0,0 +1,96 @@ +# hyperion + +![Tests](https://github.com/DiamondLightSource/hyperion/actions/workflows/code.yml/badge.svg) [![codecov](https://codecov.io/gh/DiamondLightSource/hyperion/branch/main/graph/badge.svg?token=00Ww81MHe8)](https://codecov.io/gh/DiamondLightSource/hyperion) + +Repository for the Hyperion project to implement Unattended Data Collections on the Diamond MX beamlines 
using the [BlueSky](https://nsls-ii.github.io/bluesky/) / Ophyd framework from BNL. + +Currently the software is able to: + +- Centre a sample, first using an optical camera, then using an X-ray grid scan. This centring is done at two orthogonal angles so that the sample is centred in 3D. +- Perform a rotation scan to take diffraction data of the sample + +Still to do: + +- Mount/unmount samples +- Set up the beamline to be in a standard state for collection +- Change energy of the beamline + +# Development Installation + +This project supports only the most recent Python version for which our dependencies are available - currently Python 3.11. + +Run `./utility_scripts/dls_dev_env.sh` (this assumes you're on a DLS machine; if you are not, you should be able to run just a subset of this script). + +Note that because Hyperion makes heavy use of [Dodal](https://github.com/DiamondLightSource/dodal), this will also pull a local editable version of dodal to the parent folder of this repo. + +# Controlling the Gridscan Externally (e.g. from GDA) + +## Starting the bluesky runner + +You can start the bluesky runner by running `run_hyperion.sh`. Note that this will fail on a developer machine unless you have a simulated beamline running; instead you should run `run_hyperion.sh --dev --skip-startup-connection`, which will give you a running instance (note that, without hardware, trying to run a plan on this will fail). The `--dev` flag ensures that logging will not be sent to the production Graylog. + +This script will determine whether you are on a beamline or a production machine based on the `BEAMLINE` environment variable. If on a beamline, Hyperion will run with `INFO` level logging, sending its logs both to the production Graylog and to `beamline/log/bluesky/hyperion.log` on the shared file system. + +If in a dev environment, Hyperion will instead log to a local Graylog instance and to a file at `./tmp/dev/hyperion.log`. A local instance of Graylog will need to be running for this to work correctly. To set this up and bring up the containers on your local machine, run the `setup_graylog.sh` script. + +This uses the generic defaults for a local Graylog instance. It can be accessed at `localhost:9000`, where the username and password for the Graylog portal are both `admin`. + +The hyperion Python module can also be run directly without the startup script. It takes the same command line options, including the following. `INFO` level logging of the Bluesky event documents can be enabled with the flag + +``` +python -m hyperion --dev --verbose-event-logging +``` + +Lastly, you can choose to skip running the hardware connection scripts on startup with the flag + +``` +python -m hyperion --skip-startup-connection +``` + +## Testing + +Unit tests can be run with `python -m pytest -m "not s03" --random-order`. To see log output from tests, you can turn on logging with the `--logging` command line option and then use the `-s` command line option to print logs into the console. So, to run the unit tests such that all logs are printed to the terminal, you can use `python -m pytest -m "not s03" --random-order --logging -s`. Note that this will likely overrun your terminal buffer, so you can narrow the selection of tests with the `-k ""` option. + +To be able to run the system tests, or a complete fake scan, we need the simulated S03 beamline.
This can be found at: https://gitlab.diamond.ac.uk/controls/python3/s03_utils + +To fake interaction and processing with Zocalo, you can run `fake_zocalo/dls_start_fake_zocalo.sh`, and make sure to run `module load dials/latest` before starting hyperion (in the same terminal). + +## Tracing + +Tracing information (the time taken to complete different steps of experiments) is collected by an [OpenTelemetry](https://opentelemetry.io/) tracer, and currently we export this information to a local Jaeger monitor (if available). To see the tracing output, run the [Jaeger all-in-one container](https://www.jaegertracing.io/docs/1.6/getting-started/), and go to the web interface at http://localhost:16686. + +## Starting a scan + +To start a scan you can do the following: + +``` +curl -X PUT http://127.0.0.1:5005/flyscan_xray_centre/start --data-binary "@tests/test_data/parameter_json_files/test_parameters.json" -H "Content-Type: application/json" +``` + +## Getting the Runner Status + +To get the status of the runner: + +``` +curl http://127.0.0.1:5005/status +``` + +## Stopping the Scan + +To stop a scan that is currently running: + +``` +curl -X PUT http://127.0.0.1:5005/stop + +``` + +## Writing out `DEBUG` logs + +To make the app write the `DEBUG` level logs stored in the `CircularMemoryHandler`: + +``` +curl -X PUT http://127.0.0.1:5005/flush_debug_log + +``` diff --git a/docs/developer/index.rst b/docs/developer/index.rst index ec71ac2d9..5ff756662 100644 --- a/docs/developer/index.rst +++ b/docs/developer/index.rst @@ -18,6 +18,18 @@ Documentation is split general and topic-specific sections. Each section is spli General information for working on mx-bluesky + .. grid-item-card:: :material-outlined:`precision_manufacturing;3em` + + .. toctree:: + :caption: Hyperion + :maxdepth: 1 + + hyperion/index + + +++ + + Documentation relating to hyperion + .. grid-item-card:: :material-regular:`apps;3em` .. toctree:: @@ -30,7 +42,7 @@ Documentation is split general and topic-specific sections. Each section is spli Documentation relating to serial crystallography on I24 - .. grid-item-card:: :material-regular:`apps;3em` + .. grid-item-card:: :material-outlined:`preview;3em` .. toctree:: :caption: Murko Integration diff --git a/hyperion_other/workflows/add_assignee_when_pr_opened.yml b/hyperion_other/workflows/add_assignee_when_pr_opened.yml new file mode 100644 index 000000000..6713ab01c --- /dev/null +++ b/hyperion_other/workflows/add_assignee_when_pr_opened.yml @@ -0,0 +1,26 @@ +# See https://confluence.diamond.ac.uk/x/tJqQC for instructions if this needs a new token +name: Assign author of pull request to referenced issue that was not assigned to issue ticket beforehand +on: + pull_request: + types: [ready_for_review, opened, review_requested] +jobs: + get_author_of_pull_request_query: + uses: ./.github/workflows/get_issue_from_pr.yml + with: + pr_id: ${{ github.event.pull_request.number }} + secrets: inherit + + assign_author_to_issue_mutation: + runs-on: ubuntu-latest + needs: get_author_of_pull_request_query + steps: + - id: mutation_to_add_author + env: + GITHUB_TOKEN: ${{ secrets.GHPROJECT_TOKEN }} + run: | + gh api graphql -f query=' + mutation ($assignable_id: ID!, $author_id: [ID!]! 
){ + addAssigneesToAssignable(input:{assignableId: $assignable_id, assigneeIds: $author_id }) { + clientMutationId + } + }' -f assignable_id=${{ needs.get_author_of_pull_request_query.outputs.issue_from_pr }} -f author_id=${{ needs.get_author_of_pull_request_query.outputs.author }} diff --git a/hyperion_other/workflows/assigned_issues_to_in_progress.yml b/hyperion_other/workflows/assigned_issues_to_in_progress.yml new file mode 100644 index 000000000..00d0c3b50 --- /dev/null +++ b/hyperion_other/workflows/assigned_issues_to_in_progress.yml @@ -0,0 +1,13 @@ +# See https://confluence.diamond.ac.uk/x/tJqQC for instructions if this needs a new token +name: Move issue to in progress when assigned +on: + issues: + types: + - assigned +jobs: + move_to_in_progress: + uses: ./.github/workflows/get_project_data_and_move_column.yml + with: + column_name: In Progress + issue_id: ${{ github.event.issue.node_id }} + secrets: inherit diff --git a/hyperion_other/workflows/code.yml b/hyperion_other/workflows/code.yml new file mode 100644 index 000000000..d626d703f --- /dev/null +++ b/hyperion_other/workflows/code.yml @@ -0,0 +1,80 @@ +name: Code CI + +on: + push: + pull_request: + schedule: + # Run every Monday at 8am to check latest versions of dependencies + - cron: "0 8 * * WED" + +jobs: + lint: + # pull requests are a duplicate of a branch push if within the same repo. + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: ubuntu-latest + steps: + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + architecture: x64 + + - name: Checkout Hyperion + uses: actions/checkout@v4 + + - name: Install ruff + run: pip install ruff + + - name: Run ruff + run: ruff check . 
+ + tests: + if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install with latest dependencies + run: pip install -e .[dev] + + - name: Run Pyright on changes + run: diff-quality --violations=pyright --fail-under=100 + + - name: Run tests + run: pytest --logging -s --random-order -m "not (dlstbx or s03)" + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + name: ${{ matrix.python }}/${{ matrix.os }} + files: cov.xml + + - name: Prepare test data for reference nexus + run: hyperion-populate-test-and-meta-files + + - name: Run imginfo on reference nexus + uses: ./.github/actions/verify-nexus + id: verify_reference_nexus + with: + filename: "tests/test_data/nexus_files/rotation/ins_8_5.nxs" + + # ugly hack because we get double free error on exit + - name: Generate test nexus files + run: hyperion-generate-test-nexus + - name: Report test nexus files + run: echo "filename=$(cat OUTPUT_FILENAME)" >> $GITHUB_OUTPUT + id: generated_nexus + + - name: Run imginfo on generated nexus + uses: ./.github/actions/verify-nexus + id: verify_generated_nexus + with: + filename: ${{ steps.generated_nexus.outputs.filename }} diff --git a/hyperion_other/workflows/container_tests.sh b/hyperion_other/workflows/container_tests.sh new file mode 100644 index 000000000..75b234cc4 --- /dev/null +++ b/hyperion_other/workflows/container_tests.sh @@ -0,0 +1 @@ +pytest --random-order -m "not (s03 or dlstbx)" diff --git a/hyperion_other/workflows/get_issue_from_pr.yml b/hyperion_other/workflows/get_issue_from_pr.yml new file mode 100644 index 000000000..479fc4e5e --- /dev/null +++ b/hyperion_other/workflows/get_issue_from_pr.yml @@ -0,0 +1,51 @@ +# See https://confluence.diamond.ac.uk/x/tJqQC for instructions if this needs a new token +name: Resuable workflow to get an issue from a PR +on: + workflow_call: + inputs: + pr_id: + required: true + type: string + secrets: + GHPROJECT_TOKEN: + required: true + outputs: + issue_from_pr: + description: "The issue that the PR relates to" + value: ${{ jobs.get_issue_from_pr.outputs.job_issue_id }} + author: + description: "The author of the PR" + value: ${{ jobs.get_issue_from_pr.outputs.author }} +jobs: + get_issue_from_pr: + name: Get issue from PR + runs-on: ubuntu-latest + # Map the job outputs to step outputs + outputs: + job_issue_id: ${{ steps.get_issue.outputs.step_issue_id }} + author: ${{ steps.get_issue.outputs.author }} + steps: + - id: get_issue + env: + GITHUB_TOKEN: ${{ secrets.GHPROJECT_TOKEN }} + ORGANIZATION: ${{ github.repository_owner }} + REPO_ORG_NAME: ${{ github.repository }} # Contains the repo_name in format org/repo_name + run: | + gh api graphql -f query=' + query($org: String!, $repo: String!){ + repository(owner: $org, name: $repo) { + pullRequest(number: ${{ inputs.pr_id }}) { + closingIssuesReferences(first: 1) { + edges { + node { + id + } + } + } + author { ... 
on User {id} } + } + } + }' -f org=$ORGANIZATION -F repo=${REPO_ORG_NAME##*/} > project_data.json + + echo 'step_issue_id='$(jq '.data.repository.pullRequest.closingIssuesReferences.edges[0].node.id' project_data.json) >> $GITHUB_OUTPUT + echo 'author='$(jq '.data.repository.pullRequest.author.id' project_data.json) >> $GITHUB_OUTPUT diff --git a/hyperion_other/workflows/get_project_data_and_move_column.yml b/hyperion_other/workflows/get_project_data_and_move_column.yml new file mode 100644 index 000000000..314f46a65 --- /dev/null +++ b/hyperion_other/workflows/get_project_data_and_move_column.yml @@ -0,0 +1,98 @@ +# See https://confluence.diamond.ac.uk/x/tJqQC for instructions if this needs a new token +name: Reusable workflow for moving issues on project boards +on: + workflow_call: + inputs: + column_name: # The column to move the issue to + required: true + type: string + issue_id: # The issue to move + required: true + type: string + secrets: + GHPROJECT_TOKEN: + required: true +jobs: + move_issue_to_project_column: + runs-on: ubuntu-latest + steps: + - name: Get project data + # This will get information about the project in general and add it to the environment. + # * PROJECT_ID - The ID for the project itself + # * STATUS_FIELD_ID - The ID for the field that contains the column name + # * NEW_OPTION_ID - The ID for the column name we are moving into + env: + GITHUB_TOKEN: ${{ secrets.GHPROJECT_TOKEN }} + ORGANIZATION: ${{ github.repository_owner }} + PROJECT_NUMBER: 6 + run: | + gh api graphql -f query=' + query($org: String!, $number: Int!) { + organization(login: $org){ + projectV2(number: $number) { + id + fields(first:20) { + nodes { + ... on ProjectV2Field { + id + name + } + ... on ProjectV2SingleSelectField { + id + name + options { + id + name + } + } + } + } + } + } + }' -f org=$ORGANIZATION -F number=$PROJECT_NUMBER > project_data.json + + echo 'PROJECT_ID='$(jq '.data.organization.projectV2.id' project_data.json) >> $GITHUB_ENV + echo 'STATUS_FIELD_ID='$(jq '.data.organization.projectV2.fields.nodes[] | select(.name== "Status") | .id' project_data.json) >> $GITHUB_ENV + echo 'NEW_OPTION_ID='$(jq '.data.organization.projectV2.fields.nodes[] | select(.name== "Status") | .options[] | select(.name=="${{ inputs.column_name }}") |.id' project_data.json) >> $GITHUB_ENV + - name: Add issue to project + # If the issue has not been added to the project this will add it + env: + GITHUB_TOKEN: ${{ secrets.GHPROJECT_TOKEN }} + ISSUE_ID: ${{ inputs.issue_id }} + run: | + item_id="$( gh api graphql -f query=' + mutation($project:ID!, $pr:ID!) { + addProjectV2ItemById(input: {projectId: $project, contentId: $pr}) { + item { + id + } + } + }' -f project=$PROJECT_ID -f pr=$ISSUE_ID --jq '.data.addProjectV2ItemById.item.id')" + + echo 'ITEM_ID='$item_id >> $GITHUB_ENV + + - name: Set column + # Puts the issue in the new column + env: + GITHUB_TOKEN: ${{ secrets.GHPROJECT_TOKEN }} + run: | + gh api graphql -f query=' + mutation ( + $project: ID! + $item: ID! + $status_field: ID! + $status_value: String! 
+ ) { + set_status: updateProjectV2ItemFieldValue(input: { + projectId: $project + itemId: $item + fieldId: $status_field + value: { + singleSelectOptionId: $status_value + } + }) { + projectV2Item { + id + } + } + }' -f project=$PROJECT_ID -f item=$ITEM_ID -f status_field=$STATUS_FIELD_ID -f status_value=${{ env.NEW_OPTION_ID }} --silent diff --git a/hyperion_other/workflows/linkcheck.yml b/hyperion_other/workflows/linkcheck.yml new file mode 100644 index 000000000..daa5bed90 --- /dev/null +++ b/hyperion_other/workflows/linkcheck.yml @@ -0,0 +1,34 @@ +name: Link Check + +on: + schedule: + # Run every Monday at 8am to check URL links still resolve + - cron: "0 8 * * WED" + +jobs: + docs: + strategy: + fail-fast: false + matrix: + python: ["3.11"] + + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install python version + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + + - name: Install dependencies + run: | + touch requirements_dev.txt + pip install -r requirements_dev.txt -e .[dev] + + - name: Check links + run: tox -e docs -- -b linkcheck diff --git a/hyperion_other/workflows/open_prs_to_review.yml b/hyperion_other/workflows/open_prs_to_review.yml new file mode 100644 index 000000000..8d2f3062e --- /dev/null +++ b/hyperion_other/workflows/open_prs_to_review.yml @@ -0,0 +1,19 @@ +# See https://confluence.diamond.ac.uk/x/tJqQC for instructions if this needs a new token +name: Add issues with open PRs to Review +on: + pull_request: + types: [ready_for_review, opened, review_requested] +jobs: + get_associated_issue: + uses: ./.github/workflows/get_issue_from_pr.yml + with: + pr_id: ${{ github.event.pull_request.number }} + secrets: inherit + move_to_review_if_not_in_draft: + needs: get_associated_issue + if: github.event.pull_request.draft == false + uses: ./.github/workflows/get_project_data_and_move_column.yml + with: + column_name: Review + issue_id: ${{ needs.get_associated_issue.outputs.issue_from_pr }} + secrets: inherit diff --git a/hyperion_other/workflows/opened_issues_to_backlog.yml b/hyperion_other/workflows/opened_issues_to_backlog.yml new file mode 100644 index 000000000..066fe2ad0 --- /dev/null +++ b/hyperion_other/workflows/opened_issues_to_backlog.yml @@ -0,0 +1,13 @@ +# See https://confluence.diamond.ac.uk/x/tJqQC for instructions if this needs a new token +name: Add new issues to project +on: + issues: + types: + - opened +jobs: + move_to_backlog: + uses: ./.github/workflows/get_project_data_and_move_column.yml + with: + column_name: Backlog + issue_id: ${{ github.event.issue.node_id }} + secrets: inherit diff --git a/hyperion_other/workflows/rework_prs_to_in_progress.yml b/hyperion_other/workflows/rework_prs_to_in_progress.yml new file mode 100644 index 000000000..6456dd6b9 --- /dev/null +++ b/hyperion_other/workflows/rework_prs_to_in_progress.yml @@ -0,0 +1,19 @@ +# See https://confluence.diamond.ac.uk/x/tJqQC for instructions if this needs a new token +name: Add issues with rework to in progress +on: + pull_request_review: + types: [submitted] +jobs: + get_associated_issue: + uses: ./.github/workflows/get_issue_from_pr.yml + with: + pr_id: ${{ github.event.pull_request.number }} + secrets: inherit + move_to_in_progress_if_not_approved: + needs: get_associated_issue + if: github.event.review.state == 'CHANGES_REQUESTED' + uses: ./.github/workflows/get_project_data_and_move_column.yml + with: + column_name: In Progress + issue_id: ${{ 
needs.get_associated_issue.outputs.issue_from_pr }} + secrets: inherit diff --git a/hyperion_other/workflows/test_data/pip_freeze.txt b/hyperion_other/workflows/test_data/pip_freeze.txt new file mode 100644 index 000000000..8f9669f0b --- /dev/null +++ b/hyperion_other/workflows/test_data/pip_freeze.txt @@ -0,0 +1,248 @@ +accessible-pygments==0.0.4 +aioca==1.7 +aiohttp==3.9.1 +aiosignal==1.3.1 +alabaster==0.7.16 +aniso8601==9.0.1 +anyio==4.2.0 +appdirs==1.4.4 +asciitree==0.3.3 +asttokens==2.4.1 +async-timeout==4.0.3 +attrs==23.2.0 +Babel==2.14.0 +beautifulsoup4==4.12.3 +bidict==0.22.1 +black==24.1.0 +blinker==1.7.0 +blueapi==0.3.15 +bluesky==1.12.0 +bluesky-kafka==0.10.0 +bluesky-live==0.0.8 +boltons==23.1.1 +build==1.0.3 +cachetools==5.3.2 +caproto==1.1.1 +certifi==2023.11.17 +cfgv==3.4.0 +chardet==5.2.0 +charset-normalizer==3.3.2 +click==8.1.3 +cloudpickle==3.0.0 +colorama==0.4.6 +comm==0.2.1 +confluent-kafka==2.3.0 +contourpy==1.2.0 +coverage==7.4.0 +cycler==0.12.1 +dask==2024.1.0 +databroker==1.2.5 +dataclasses-json==0.6.3 +decorator==5.1.1 +Deprecated==1.2.14 +diff_cover==8.0.3 +distlib==0.3.8 +dls-bluesky-core==0.0.3 +dls-dodal==1.13.1 +dnspython==2.5.0 +docopt==0.6.2 +doct==1.1.0 +docutils==0.20.1 +email-validator==2.1.0.post1 +entrypoints==0.4 +epicscorelibs==7.0.7.99.0.2 +event-model==1.19.9 +exceptiongroup==1.2.0 +executing==2.0.1 +fastapi==0.98.0 +fasteners==0.19 +filelock==3.13.1 +Flask==3.0.1 +Flask-RESTful==0.3.10 +fonttools==4.47.2 +freephil==0.2.1 +frozenlist==1.4.1 +fsspec==2023.12.2 +gitdb==4.0.11 +GitPython==3.1.41 +googleapis-common-protos==1.59.1 +graypy==2.1.0 +greenlet==3.0.3 +grpcio==1.60.0 +h11==0.14.0 +h5py==3.10.0 +hdf5plugin==4.3.0 +HeapDict==1.0.1 +historydict==1.2.6 +httpcore==1.0.2 +httptools==0.6.1 +httpx==0.26.0 +humanize==4.9.0 +-e git+ssh://git@github.com/DiamondLightSource/hyperion.git@5b88ce8b69483397adb66f78da5970a5186fcae2#egg=hyperion +identify==2.5.33 +idna==3.6 +imageio==2.33.1 +imagesize==1.4.1 +importlib-metadata==6.11.0 +importlib-resources==6.1.1 +iniconfig==2.0.0 +intake==0.6.4 +ipython==8.20.0 +ipywidgets==8.1.1 +ispyb==10.0.0 +itsdangerous==2.1.2 +jedi==0.19.1 +Jinja2==3.1.3 +jsonschema==4.21.1 +jsonschema-specifications==2023.12.1 +jupyterlab-widgets==3.0.9 +kiwisolver==1.4.5 +livereload==2.6.3 +locket==1.0.0 +MarkupSafe==2.1.4 +marshmallow==3.20.2 +matplotlib==3.8.2 +matplotlib-inline==0.1.6 +mockito==1.4.0 +mongoquery==1.4.2 +msgpack==1.0.7 +msgpack-numpy==0.4.8 +multidict==6.0.4 +mypy==1.8.0 +mypy-extensions==1.0.0 +mysql-connector-python==8.3.0 +networkx==3.2.1 +nexgen==0.8.4 +nodeenv==1.8.0 +nose2==0.14.0 +nslsii==0.9.1 +numcodecs==0.12.1 +numpy==1.26.3 +opencv-python-headless==4.9.0.80 +opentelemetry-api==1.22.0 +opentelemetry-distro==0.43b0 +opentelemetry-exporter-jaeger==1.21.0 +opentelemetry-exporter-jaeger-proto-grpc==1.21.0 +opentelemetry-exporter-jaeger-thrift==1.21.0 +opentelemetry-instrumentation==0.43b0 +opentelemetry-sdk==1.22.0 +opentelemetry-semantic-conventions==0.43b0 +ophyd==1.9.0 +ophyd-async @ git+https://github.com/bluesky/ophyd-async@ec5729640041ee5b77b4614158793af3a34cf9d8 +orjson==3.9.12 +p4p==4.1.12 +packaging==23.2 +pandas==2.2.0 +parso==0.8.3 +partd==1.4.1 +pathlib2==2.3.7.post1 +pathspec==0.12.1 +pexpect==4.9.0 +pika==1.3.2 +pillow==10.2.0 +PIMS==0.6.1 +Pint==0.23 +pipdeptree==2.13.2 +platformdirs==4.1.0 +pluggy==1.4.0 +ply==3.11 +pre-commit==3.6.0 +prettytable==3.9.0 +prompt-toolkit==3.0.43 +protobuf==4.25.2 +psutil==5.9.8 +ptyprocess==0.7.0 +pure-eval==0.2.2 +pvxslibs==1.3.1 +py==1.11.0 +pydantic==1.10.14 
+pydata-sphinx-theme==0.15.2 +pyepics==3.5.2 +Pygments==2.17.2 +pymongo==4.6.1 +pyOlog==4.5.0 +pyparsing==3.1.1 +pyproject-api==1.6.1 +pyproject_hooks==1.0.0 +pyright==1.1.348 +pyright_diff_quality_plugin @ git+https://github.com/DiamondLightSource/pyright_diff_quality_plugin.git@77fc0819e708eb16ffdcaea06552a2804294b53e +pytest==7.4.4 +pytest-asyncio==0.23.3 +pytest-cov==4.1.0 +pytest-random-order==1.1.1 +python-dateutil==2.8.2 +python-dotenv==1.0.1 +python-multipart==0.0.6 +pytz==2023.3.post1 +PyYAML==6.0.1 +pyzmq==25.1.2 +redis==5.0.1 +referencing==0.32.1 +requests==2.31.0 +rpds-py==0.17.1 +ruff==0.1.14 +scanspec==0.6.5 +scipy==1.12.0 +semver==3.0.2 +setuptools-dso==2.10 +six==1.16.0 +slicerator==1.1.0 +smmap==5.0.1 +sniffio==1.3.0 +snowballstemmer==2.2.0 +soupsieve==2.5 +Sphinx==7.2.6 +sphinx-autobuild==2021.3.14 +sphinx-copybutton==0.5.2 +sphinx_design==0.5.0 +sphinxcontrib-applehelp==1.0.8 +sphinxcontrib-devhelp==1.0.6 +sphinxcontrib-htmlhelp==2.0.5 +sphinxcontrib-jsmath==1.0.1 +sphinxcontrib-qthelp==1.0.7 +sphinxcontrib-serializinghtml==1.1.10 +SQLAlchemy==1.4.51 +stack-data==0.6.3 +starlette==0.27.0 +stomp.py==8.1.0 +suitcase-mongo==0.4.0 +suitcase-msgpack==0.3.0 +suitcase-utils==0.5.4 +super-state-machine==2.0.2 +tabulate==0.9.0 +thrift==0.16.0 +tifffile==2023.12.9 +tomli==2.0.1 +toolz==0.12.1 +tornado==6.4 +tox==3.28.0 +tox-direct==0.4 +tqdm==4.66.1 +traitlets==5.14.1 +types-mock==5.1.0.20240106 +types-PyYAML==6.0.12.12 +types-requests==2.31.0.20240125 +typing-inspect==0.9.0 +typing_extensions==4.5.0 +tzdata==2023.4 +tzlocal==5.2 +ujson==5.9.0 +urllib3==2.1.0 +uvicorn==0.27.0 +uvloop==0.19.0 +virtualenv==20.25.0 +watchfiles==0.21.0 +wcwidth==0.2.13 +websocket-client==1.7.0 +websockets==12.0 +Werkzeug==3.0.1 +widgetsnbextension==4.0.9 +workflows==2.26 +wrapt==1.16.0 +xarray==2024.1.1 +yarl==1.9.4 +zarr==2.16.1 +zict==2.2.0 +zipp==3.17.0 +zmq==0.0.0 +zocalo==0.30.2 diff --git a/hyperion_other/workflows/test_data/setup.cfg b/hyperion_other/workflows/test_data/setup.cfg new file mode 100644 index 000000000..dac870988 --- /dev/null +++ b/hyperion_other/workflows/test_data/setup.cfg @@ -0,0 +1,75 @@ +[metadata] +name = hyperion +description = Unattended MX data collection using BlueSky / Ophyd +url = https://github.com/DiamondLightSource/hyperion +license = BSD 3-Clause License +long_description = file: README.rst +long_description_content_type = text/x-rst +classifiers = + Development Status :: 3 - Alpha + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + +[options] +python_requires = >=3.9 +packages = find: +package_dir = + =src +install_requires = + bluesky + pyepics + blueapi + flask-restful + ispyb + scanspec + numpy + nexgen>0.8.3 + opentelemetry-distro + opentelemetry-exporter-jaeger + ophyd + semver + dls-dodal + pydantic<2.0 # See https://github.com/DiamondLightSource/hyperion/issues/774 + scipy + pyzmq + +[options.entry_points] +console_scripts = + hyperion = hyperion.__main__:main + hyperion-callbacks = hyperion.external_interaction.callbacks.__main__:main + hyperion-generate-test-nexus = hyperion.utils.validation:generate_test_nexus + +[options.extras_require] +dev = + GitPython + black + pytest-cov + pytest-random-order + pytest-asyncio + ipython + mockito + pre-commit + mypy + matplotlib + tox + build + ruff + diff-cover + pyright + pyright_diff_quality_plugin @ git+https://github.com/DiamondLightSource/pyright_diff_quality_plugin.git + + +[options.packages.find] +where = src + +[options.package_data] 
+hyperion = *.txt + +[mypy] +# Ignore missing stubs for modules we use +ignore_missing_imports = True +#needed for opentelemetry +namespace_packages = true +[mypy-opentelemetry.sdk.*] +implicit_reexport = True diff --git a/hyperion_other/workflows/test_data/setup.cfg.pinned b/hyperion_other/workflows/test_data/setup.cfg.pinned new file mode 100644 index 000000000..8de01cf3a --- /dev/null +++ b/hyperion_other/workflows/test_data/setup.cfg.pinned @@ -0,0 +1,74 @@ +[metadata] +name = hyperion +description = Unattended MX data collection using BlueSky / Ophyd +url = https://github.com/DiamondLightSource/hyperion +license = BSD 3-Clause License +long_description = file: README.rst +long_description_content_type = text/x-rst +classifiers = + Development Status :: 3 - Alpha + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + +[options] +python_requires = >=3.9 +packages = find: +package_dir = + =src +install_requires = + bluesky == 1.12.0 + pyepics == 3.5.2 + blueapi == 0.3.15 + flask-restful == 0.3.10 + ispyb == 10.0.0 + scanspec == 0.6.5 + numpy == 1.26.3 + nexgen == 0.8.4 + opentelemetry-distro == 0.43b0 + opentelemetry-exporter-jaeger == 1.21.0 + ophyd == 1.9.0 + semver == 3.0.2 + dls-dodal == 1.13.1 + pydantic == 1.10.14 # See https://github.com/DiamondLightSource/hyperion/issues/774 + scipy == 1.12.0 + pyzmq + +[options.entry_points] +console_scripts = + hyperion = hyperion.__main__:main + hyperion-callbacks = hyperion.external_interaction.callbacks.__main__:main + +[options.extras_require] +dev = + GitPython + black + pytest-cov + pytest-random-order + pytest-asyncio + ipython + mockito + pre-commit + mypy + matplotlib + tox + build + ruff + diff-cover + pyright + pyright_diff_quality_plugin @ git+https://github.com/DiamondLightSource/pyright_diff_quality_plugin.git + + +[options.packages.find] +where = src + +[options.package_data] +hyperion = *.txt + +[mypy] +# Ignore missing stubs for modules we use +ignore_missing_imports = True +#needed for opentelemetry +namespace_packages = true +[mypy-opentelemetry.sdk.*] +implicit_reexport = True diff --git a/hyperion_other/workflows/test_data/setup.cfg.unpinned b/hyperion_other/workflows/test_data/setup.cfg.unpinned new file mode 100644 index 000000000..cfdf112f9 --- /dev/null +++ b/hyperion_other/workflows/test_data/setup.cfg.unpinned @@ -0,0 +1,74 @@ +[metadata] +name = hyperion +description = Unattended MX data collection using BlueSky / Ophyd +url = https://github.com/DiamondLightSource/hyperion +license = BSD 3-Clause License +long_description = file: README.rst +long_description_content_type = text/x-rst +classifiers = + Development Status :: 3 - Alpha + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + +[options] +python_requires = >=3.9 +packages = find: +package_dir = + =src +install_requires = + bluesky + pyepics + blueapi + flask-restful + ispyb + scanspec + numpy + nexgen>0.8.3 + opentelemetry-distro + opentelemetry-exporter-jaeger + ophyd + semver + dls-dodal + pydantic<2.0 # See https://github.com/DiamondLightSource/hyperion/issues/774 + scipy + pyzmq + +[options.entry_points] +console_scripts = + hyperion = hyperion.__main__:main + hyperion-callbacks = hyperion.external_interaction.callbacks.__main__:main + +[options.extras_require] +dev = + GitPython + black + pytest-cov + pytest-random-order + pytest-asyncio + ipython + mockito + pre-commit + mypy + matplotlib + tox + build + ruff + 
diff-cover + pyright + pyright_diff_quality_plugin @ git+https://github.com/DiamondLightSource/pyright_diff_quality_plugin.git + + +[options.packages.find] +where = src + +[options.package_data] +hyperion = *.txt + +[mypy] +# Ignore missing stubs for modules we use +ignore_missing_imports = True +#needed for opentelemetry +namespace_packages = true +[mypy-opentelemetry.sdk.*] +implicit_reexport = True diff --git a/hyperion_other/workflows/test_pin_versions.py b/hyperion_other/workflows/test_pin_versions.py new file mode 100644 index 000000000..1a6d41448 --- /dev/null +++ b/hyperion_other/workflows/test_pin_versions.py @@ -0,0 +1,79 @@ +import io +from functools import partial +from unittest.mock import MagicMock, patch + +import pin_versions +import pytest + + +@pytest.fixture +def patched_run_pip_freeze(): + with patch("pin_versions.run_pip_freeze") as run_pip_freeze: + with open("test_data/pip_freeze.txt") as freeze_output: + mock_process = MagicMock() + run_pip_freeze.return_value = mock_process + mock_process.stdout = freeze_output.read() + mock_process.returncode = 0 + yield run_pip_freeze + + +@pytest.mark.parametrize( + "input, expected", + [ + ("", ("", None)), + ( + " pydantic<2.0 # See https://github.com/DiamondLightSource/hyperion/issues/774", + ( + " pydantic<2.0 ", + " See https://github.com/DiamondLightSource/hyperion/issues/774", + ), + ), + ], +) +def test_strip_comment(input, expected): + assert pin_versions.strip_comment(input) == expected + + +@pytest.mark.parametrize( + "input, expected", + [ + ("dls-dodal", "dls-dodal"), + ("dls_dodal", "dls-dodal"), + ("dls.dodal", "dls-dodal"), + ], +) +def test_normalize(input, expected): + assert pin_versions.normalize(input) == expected + + +def test_unpin(): + with io.StringIO() as output_file: + with open("test_data/setup.cfg") as input_file: + pin_versions.process_files( + input_file, output_file, pin_versions.unpin_versions + ) + with open("test_data/setup.cfg.unpinned") as expected_file: + assert output_file.getvalue() == expected_file.read() + + +@patch("pin_versions.stdout") +def test_write_commit_message(mock_stdout, patched_run_pip_freeze): + installed_versions = pin_versions.fetch_pin_versions() + pin_versions.write_commit_message(installed_versions) + mock_stdout.write.assert_called_once_with( + "Pin dependencies prior to release. 
Dodal 1.13.1, nexgen 0.8.4" + ) + + +def test_pin(patched_run_pip_freeze): + installed_versions = pin_versions.fetch_pin_versions() + with io.StringIO() as output_file: + with open("test_data/setup.cfg.unpinned") as input_file: + pin_versions.process_files( + input_file, + output_file, + partial(pin_versions.update_setup_cfg_line, installed_versions), + ) + + with open("test_data/setup.cfg.pinned") as expected_file: + assert output_file.getvalue() == expected_file.read() diff --git a/pyproject.toml b/pyproject.toml index 91e000ff6..996a244e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,25 +7,47 @@ name = "mx-bluesky" classifiers = [ "Development Status :: 3 - Alpha", "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ] description = "Bluesky tools for MX Beamlines at DLS" dependencies = [ - "bluesky", - "ophyd", - "pyepics", + # + # If a constraint is not set here or if the dependency is pinned to a hash + # it will be auto-pinned to the latest release version by the pre-release workflow + # + "annotated_types", "caproto", + "fastapi[all]", + "flask-restful", + "ispyb", "jupyterlab", "matplotlib", - "requests", + "nexgen", + "numpy", "opencv-python", + "opentelemetry-distro", + "opentelemetry-exporter-otlp", "pydantic", - "fastapi[all]", + "pyepics", + "pyzmq", + "requests", + "scanspec", + "scipy", + "semver", + # + # These dependencies may be issued as pre-release versions and should have a pin constraint + # as by default pip-install will not upgrade to a pre-release. + # + "blueapi == 0.4.5a1", + "daq-config-server >= 0.1.1", + "ophyd == 1.9.0", + "ophyd-async >= 0.3a5", + "bluesky >= 1.13.0a4", "dls-dodal @ git+https://github.com/DiamondLightSource/dodal.git", - "blueapi==0.4.5-a1", ] + + dynamic = ["version"] license.file = "LICENSE" readme = "README.rst" @@ -34,10 +56,17 @@ requires-python = ">=3.10" [project.optional-dependencies] dev = [ "black", + "black", + "build", "copier", + "diff-cover", "GitPython", + "ipython", + "matplotlib", + "mockito", "mypy", "myst-parser", + "ophyd-async", "pipdeptree", "pre-commit", "pydata-sphinx-theme>=0.12", @@ -49,16 +78,22 @@ dev = [ "ruff", "sphinx-autobuild", "sphinx-copybutton", + "sphinxcontrib-plantuml", "sphinx-design", "tox-direct", + "tox", "types-mock", "types-requests", ] [project.scripts] mx-bluesky = "mx_bluesky.__main__:main" -run_extruder = "mx_bluesky.i24.serial.run_serial:run_extruder" -run_fixed_target = "mx_bluesky.i24.serial.run_serial:run_fixed_target" +run_extruder = "mx_bluesky.beamlines.i24.serial.run_serial:run_extruder" +run_fixed_target = "mx_bluesky.beamlines.i24.serial.run_serial:run_fixed_target" +hyperion = "mx_bluesky.hyperion.__main__:main" +hyperion-callbacks = "mx_bluesky.hyperion.external_interaction.callbacks.__main__:main" +hyperion-generate-test-nexus = "mx_bluesky.hyperion.utils.validation:generate_test_nexus" +hyperion-populate-test-and-meta-files = "mx_bluesky.hyperion.utils.validation:copy_test_meta_data_files" [project.urls] GitHub = "https://github.com/DiamondLightSource/mx-bluesky" @@ -77,6 +112,11 @@ reportMissingImports = false # Ignore missing stubs in imported modules [tool.pytest.ini_options] # Run pytest with all our checkers, and don't spam us with massive tracebacks on error asyncio_mode = "auto" +markers = [ + "s03: marks tests as requiring the s03 simulator running (deselect with '-m \"not s03\"')", + "dlstbx: marks tests as requiring dlstbx (deselect 
with '-m \"not dlstbx\"')", + "skip_log_setup: marks tests so that loggers are not setup before the test.", +] addopts = """ --tb=native -vv --doctest-modules --doctest-glob="*.rst" """ @@ -85,9 +125,17 @@ filterwarnings = [ "error", # Ignore deprecation warning from zocalo "ignore:((.|\n)*)was never iterated.*:RuntimeWarning", + # Ignore deprecation warning from sqlalchemy + "ignore::sqlalchemy.exc.MovedIn20Warning", + # Ophyd status objects in tests + "ignore:.*The 'done' and 'success' parameters will be removed.*:DeprecationWarning", + # Ignore nondeterministic closing of log, RE, etc. before test end + "ignore:(.*)unclosed file(.*)name='(.*)dodal.log'(.*):ResourceWarning", + "ignore:(.*)unclosed $start_log_path 2>&1 & + if [ $EXTERNAL_CALLBACK_SERVICE == true ]; then + hyperion-callbacks `echo $cb_commands;`>$callback_start_log_path 2>&1 & + fi + echo "$(date) Waiting for Hyperion to start" + + for i in {1..30} + do + echo "$(date)" + curl --head -X GET http://localhost:5005/status >/dev/null + ret_value=$? + if [ $ret_value -ne 0 ]; then + sleep 1 + else + break + fi + done + + if [ $ret_value -ne 0 ]; then + echo "$(date) Hyperion Failed to start!!!!" + exit 1 + else + echo "$(date) Hyperion started" + fi +fi + +sleep 1 diff --git a/src/mx_bluesky/i24/__init__.py b/src/mx_bluesky/beamlines/__init__.py similarity index 100% rename from src/mx_bluesky/i24/__init__.py rename to src/mx_bluesky/beamlines/__init__.py diff --git a/src/mx_bluesky/beamlines/i04/__init__.py b/src/mx_bluesky/beamlines/i04/__init__.py new file mode 100644 index 000000000..8fd3d8594 --- /dev/null +++ b/src/mx_bluesky/beamlines/i04/__init__.py @@ -0,0 +1,3 @@ +from mx_bluesky.beamlines.i04.thawing_plan import thaw, thaw_and_center + +__all__ = ["thaw", "thaw_and_center"] diff --git a/src/mx_bluesky/i04/callbacks/murko_callback.py b/src/mx_bluesky/beamlines/i04/callbacks/murko_callback.py similarity index 100% rename from src/mx_bluesky/i04/callbacks/murko_callback.py rename to src/mx_bluesky/beamlines/i04/callbacks/murko_callback.py diff --git a/src/mx_bluesky/i04/thawing_plan.py b/src/mx_bluesky/beamlines/i04/thawing_plan.py similarity index 97% rename from src/mx_bluesky/i04/thawing_plan.py rename to src/mx_bluesky/beamlines/i04/thawing_plan.py index baa49edb5..2d5538a2d 100644 --- a/src/mx_bluesky/i04/thawing_plan.py +++ b/src/mx_bluesky/beamlines/i04/thawing_plan.py @@ -10,7 +10,7 @@ from dodal.devices.smargon import Smargon from dodal.devices.thawer import Thawer, ThawerStates -from mx_bluesky.i04.callbacks.murko_callback import MurkoCallback +from mx_bluesky.beamlines.i04.callbacks.murko_callback import MurkoCallback def thaw_and_center( diff --git a/src/mx_bluesky/i24/serial/extruder/__init__.py b/src/mx_bluesky/beamlines/i24/__init__.py similarity index 100% rename from src/mx_bluesky/i24/serial/extruder/__init__.py rename to src/mx_bluesky/beamlines/i24/__init__.py diff --git a/src/mx_bluesky/i24/serial/__init__.py b/src/mx_bluesky/beamlines/i24/serial/__init__.py similarity index 100% rename from src/mx_bluesky/i24/serial/__init__.py rename to src/mx_bluesky/beamlines/i24/serial/__init__.py diff --git a/src/mx_bluesky/i24/serial/blueapi_config.yaml b/src/mx_bluesky/beamlines/i24/serial/blueapi_config.yaml similarity index 81% rename from src/mx_bluesky/i24/serial/blueapi_config.yaml rename to src/mx_bluesky/beamlines/i24/serial/blueapi_config.yaml index 304352360..9831f5ae9 100644 --- a/src/mx_bluesky/i24/serial/blueapi_config.yaml +++ b/src/mx_bluesky/beamlines/i24/serial/blueapi_config.yaml @@ -3,7 
+3,7 @@ env: - kind: dodal module: dodal.beamlines.i24 - kind: planFunctions - module: mx_bluesky.i24.serial + module: mx_bluesky.beamlines.i24.serial events: broadcast_status_events: False api: diff --git a/src/mx_bluesky/i24/serial/dcid.py b/src/mx_bluesky/beamlines/i24/serial/dcid.py similarity index 99% rename from src/mx_bluesky/i24/serial/dcid.py rename to src/mx_bluesky/beamlines/i24/serial/dcid.py index 9514dc2ee..a5784ecc8 100644 --- a/src/mx_bluesky/i24/serial/dcid.py +++ b/src/mx_bluesky/beamlines/i24/serial/dcid.py @@ -10,8 +10,8 @@ import requests -from mx_bluesky.i24.serial.parameters import SSXType -from mx_bluesky.i24.serial.setup_beamline import ( +from mx_bluesky.beamlines.i24.serial.parameters import SSXType +from mx_bluesky.beamlines.i24.serial.setup_beamline import ( Detector, Eiger, Pilatus, diff --git a/src/mx_bluesky/i24/serial/extruder/EX-gui-edm/DetStage.edl b/src/mx_bluesky/beamlines/i24/serial/extruder/EX-gui-edm/DetStage.edl similarity index 100% rename from src/mx_bluesky/i24/serial/extruder/EX-gui-edm/DetStage.edl rename to src/mx_bluesky/beamlines/i24/serial/extruder/EX-gui-edm/DetStage.edl diff --git a/src/mx_bluesky/i24/serial/extruder/EX-gui-edm/DiamondExtruder-I24-py3v1.edl b/src/mx_bluesky/beamlines/i24/serial/extruder/EX-gui-edm/DiamondExtruder-I24-py3v1.edl similarity index 100% rename from src/mx_bluesky/i24/serial/extruder/EX-gui-edm/DiamondExtruder-I24-py3v1.edl rename to src/mx_bluesky/beamlines/i24/serial/extruder/EX-gui-edm/DiamondExtruder-I24-py3v1.edl diff --git a/src/mx_bluesky/i24/serial/extruder/EX-gui-edm/microdrop_alignment.edl b/src/mx_bluesky/beamlines/i24/serial/extruder/EX-gui-edm/microdrop_alignment.edl similarity index 100% rename from src/mx_bluesky/i24/serial/extruder/EX-gui-edm/microdrop_alignment.edl rename to src/mx_bluesky/beamlines/i24/serial/extruder/EX-gui-edm/microdrop_alignment.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/__init__.py b/src/mx_bluesky/beamlines/i24/serial/extruder/__init__.py similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/__init__.py rename to src/mx_bluesky/beamlines/i24/serial/extruder/__init__.py diff --git a/src/mx_bluesky/i24/serial/extruder/i24ssx_Extruder_Collect_py3v2.py b/src/mx_bluesky/beamlines/i24/serial/extruder/i24ssx_Extruder_Collect_py3v2.py similarity index 96% rename from src/mx_bluesky/i24/serial/extruder/i24ssx_Extruder_Collect_py3v2.py rename to src/mx_bluesky/beamlines/i24/serial/extruder/i24ssx_Extruder_Collect_py3v2.py index 9362ab886..ce04bbb8d 100755 --- a/src/mx_bluesky/i24/serial/extruder/i24ssx_Extruder_Collect_py3v2.py +++ b/src/mx_bluesky/beamlines/i24/serial/extruder/i24ssx_Extruder_Collect_py3v2.py @@ -27,17 +27,20 @@ from dodal.devices.i24.i24_detector_motion import DetectorMotion from dodal.devices.zebra import DISCONNECT, SOFT_IN3, Zebra -from mx_bluesky.i24.serial import log -from mx_bluesky.i24.serial.dcid import DCID -from mx_bluesky.i24.serial.parameters import ExtruderParameters, SSXType -from mx_bluesky.i24.serial.parameters.constants import PARAM_FILE_NAME, PARAM_FILE_PATH -from mx_bluesky.i24.serial.setup_beamline import Pilatus, caget, caput, pv -from mx_bluesky.i24.serial.setup_beamline import setup_beamline as sup -from mx_bluesky.i24.serial.setup_beamline.setup_detector import ( +from mx_bluesky.beamlines.i24.serial import log +from mx_bluesky.beamlines.i24.serial.dcid import DCID +from mx_bluesky.beamlines.i24.serial.parameters import ExtruderParameters, SSXType +from mx_bluesky.beamlines.i24.serial.parameters.constants 
import ( + PARAM_FILE_NAME, + PARAM_FILE_PATH, +) +from mx_bluesky.beamlines.i24.serial.setup_beamline import Pilatus, caget, caput, pv +from mx_bluesky.beamlines.i24.serial.setup_beamline import setup_beamline as sup +from mx_bluesky.beamlines.i24.serial.setup_beamline.setup_detector import ( UnknownDetectorType, get_detector_type, ) -from mx_bluesky.i24.serial.setup_beamline.setup_zebra_plans import ( +from mx_bluesky.beamlines.i24.serial.setup_beamline.setup_zebra_plans import ( GATE_START, TTL_EIGER, TTL_PILATUS, @@ -49,7 +52,7 @@ setup_zebra_for_extruder_with_pump_probe_plan, setup_zebra_for_quickshot_plan, ) -from mx_bluesky.i24.serial.write_nexus import call_nexgen +from mx_bluesky.beamlines.i24.serial.write_nexus import call_nexgen usage = "%(prog)s command [options]" logger = logging.getLogger("I24ssx.extruder") diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/CustomChip_py3v1.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/CustomChip_py3v1.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/CustomChip_py3v1.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/CustomChip_py3v1.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/DetStage.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/DetStage.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/DetStage.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/DetStage.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/DiamondChipI24-py3v1.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/DiamondChipI24-py3v1.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/DiamondChipI24-py3v1.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/DiamondChipI24-py3v1.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/ME14E-GeneralPurpose.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/ME14E-GeneralPurpose.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/ME14E-GeneralPurpose.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/ME14E-GeneralPurpose.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/MappingLite-oxford_py3v1.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/MappingLite-oxford_py3v1.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/MappingLite-oxford_py3v1.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/MappingLite-oxford_py3v1.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/PMAC_Command.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/PMAC_Command.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/PMAC_Command.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/PMAC_Command.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/Shutter_Control.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/Shutter_Control.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/Shutter_Control.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/Shutter_Control.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/microdrop_alignment.edl 
b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/microdrop_alignment.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/microdrop_alignment.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/microdrop_alignment.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/nudgechip.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/nudgechip.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/nudgechip.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/nudgechip.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/pumpprobe-py3v1.edl b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/pumpprobe-py3v1.edl similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/pumpprobe-py3v1.edl rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/pumpprobe-py3v1.edl diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/short1-laser.png b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/short1-laser.png similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/short1-laser.png rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/short1-laser.png diff --git a/src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/short2-laser.png b/src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/short2-laser.png similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/FT-gui-edm/short2-laser.png rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/short2-laser.png diff --git a/tests/i24/serial/__init__.py b/src/mx_bluesky/beamlines/i24/serial/fixed_target/__init__.py similarity index 100% rename from tests/i24/serial/__init__.py rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/__init__.py diff --git a/src/mx_bluesky/i24/serial/fixed_target/ft_utils.py b/src/mx_bluesky/beamlines/i24/serial/fixed_target/ft_utils.py similarity index 100% rename from src/mx_bluesky/i24/serial/fixed_target/ft_utils.py rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/ft_utils.py diff --git a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Collect_py3v1.py b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Collect_py3v1.py similarity index 97% rename from src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Collect_py3v1.py rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Collect_py3v1.py index ff8692daa..dc0f67bb7 100755 --- a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Collect_py3v1.py +++ b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Collect_py3v1.py @@ -23,26 +23,26 @@ from dodal.devices.i24.pmac import PMAC from dodal.devices.zebra import Zebra -from mx_bluesky.i24.serial import log -from mx_bluesky.i24.serial.dcid import DCID -from mx_bluesky.i24.serial.fixed_target.ft_utils import ( +from mx_bluesky.beamlines.i24.serial import log +from mx_bluesky.beamlines.i24.serial.dcid import DCID +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ( ChipType, MappingType, PumpProbeSetting, ) -from mx_bluesky.i24.serial.parameters import ( +from mx_bluesky.beamlines.i24.serial.parameters import ( ChipDescription, FixedTargetParameters, SSXType, ) -from mx_bluesky.i24.serial.parameters.constants import ( +from mx_bluesky.beamlines.i24.serial.parameters.constants import ( LITEMAP_PATH, PARAM_FILE_NAME, PARAM_FILE_PATH_FT, ) -from 
mx_bluesky.i24.serial.setup_beamline import caget, cagetstring, caput, pv -from mx_bluesky.i24.serial.setup_beamline import setup_beamline as sup -from mx_bluesky.i24.serial.setup_beamline.setup_zebra_plans import ( +from mx_bluesky.beamlines.i24.serial.setup_beamline import caget, cagetstring, caput, pv +from mx_bluesky.beamlines.i24.serial.setup_beamline import setup_beamline as sup +from mx_bluesky.beamlines.i24.serial.setup_beamline.setup_zebra_plans import ( SHUTTER_OPEN_TIME, arm_zebra, close_fast_shutter, @@ -51,7 +51,7 @@ reset_zebra_when_collection_done_plan, setup_zebra_for_fastchip_plan, ) -from mx_bluesky.i24.serial.write_nexus import call_nexgen +from mx_bluesky.beamlines.i24.serial.write_nexus import call_nexgen ABORTED = False diff --git a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Manager_py3v1.py b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Manager_py3v1.py similarity index 98% rename from src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Manager_py3v1.py rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Manager_py3v1.py index 69e1db362..f8d4209fe 100755 --- a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Manager_py3v1.py +++ b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Manager_py3v1.py @@ -23,16 +23,20 @@ from dodal.devices.i24.i24_detector_motion import DetectorMotion from dodal.devices.i24.pmac import PMAC, EncReset, LaserSettings -from mx_bluesky.i24.serial import log -from mx_bluesky.i24.serial.fixed_target import i24ssx_Chip_Mapping_py3v1 as mapping -from mx_bluesky.i24.serial.fixed_target import i24ssx_Chip_StartUp_py3v1 as startup -from mx_bluesky.i24.serial.fixed_target.ft_utils import ( +from mx_bluesky.beamlines.i24.serial import log +from mx_bluesky.beamlines.i24.serial.fixed_target import ( + i24ssx_Chip_Mapping_py3v1 as mapping, +) +from mx_bluesky.beamlines.i24.serial.fixed_target import ( + i24ssx_Chip_StartUp_py3v1 as startup, +) +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ( ChipType, Fiducials, MappingType, ) -from mx_bluesky.i24.serial.parameters import get_chip_format -from mx_bluesky.i24.serial.parameters.constants import ( +from mx_bluesky.beamlines.i24.serial.parameters import get_chip_format +from mx_bluesky.beamlines.i24.serial.parameters.constants import ( CS_FILES_PATH, FULLMAP_PATH, LITEMAP_PATH, @@ -40,8 +44,10 @@ PARAM_FILE_PATH_FT, PVAR_FILE_PATH, ) -from mx_bluesky.i24.serial.setup_beamline import Pilatus, caget, caput, pv -from mx_bluesky.i24.serial.setup_beamline.setup_detector import get_detector_type +from mx_bluesky.beamlines.i24.serial.setup_beamline import Pilatus, caget, caput, pv +from mx_bluesky.beamlines.i24.serial.setup_beamline.setup_detector import ( + get_detector_type, +) logger = logging.getLogger("I24ssx.chip_manager") diff --git a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Mapping_py3v1.py b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Mapping_py3v1.py similarity index 93% rename from src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Mapping_py3v1.py rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Mapping_py3v1.py index 14c0e368b..f597376ad 100755 --- a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_Mapping_py3v1.py +++ b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_Mapping_py3v1.py @@ -10,17 +10,17 @@ import numpy as np from matplotlib import pyplot as plt -from mx_bluesky.i24.serial import log -from mx_bluesky.i24.serial.fixed_target.ft_utils import ChipType 
-from mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1 import ( +from mx_bluesky.beamlines.i24.serial import log +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ChipType +from mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1 import ( check_files, get_shot_order, get_xy, read_parameter_file, write_file, ) -from mx_bluesky.i24.serial.parameters import get_chip_format -from mx_bluesky.i24.serial.parameters.constants import PARAM_FILE_PATH_FT +from mx_bluesky.beamlines.i24.serial.parameters import get_chip_format +from mx_bluesky.beamlines.i24.serial.parameters.constants import PARAM_FILE_PATH_FT logger = logging.getLogger("I24ssx.chip_mapping") diff --git a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_StartUp_py3v1.py b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_StartUp_py3v1.py similarity index 97% rename from src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_StartUp_py3v1.py rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_StartUp_py3v1.py index 8da537ebd..10678a17d 100755 --- a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_Chip_StartUp_py3v1.py +++ b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_Chip_StartUp_py3v1.py @@ -10,10 +10,13 @@ import numpy as np -from mx_bluesky.i24.serial import log -from mx_bluesky.i24.serial.fixed_target.ft_utils import ChipType -from mx_bluesky.i24.serial.parameters import FixedTargetParameters, get_chip_format -from mx_bluesky.i24.serial.parameters.constants import ( +from mx_bluesky.beamlines.i24.serial import log +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ChipType +from mx_bluesky.beamlines.i24.serial.parameters import ( + FixedTargetParameters, + get_chip_format, +) +from mx_bluesky.beamlines.i24.serial.parameters.constants import ( HEADER_FILES_PATH, PARAM_FILE_NAME, PARAM_FILE_PATH_FT, diff --git a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_moveonclick.py b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_moveonclick.py similarity index 95% rename from src/mx_bluesky/i24/serial/fixed_target/i24ssx_moveonclick.py rename to src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_moveonclick.py index a62edeaff..c9bb0dbd9 100755 --- a/src/mx_bluesky/i24/serial/fixed_target/i24ssx_moveonclick.py +++ b/src/mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_moveonclick.py @@ -12,9 +12,11 @@ from dodal.devices.i24.pmac import PMAC from dodal.devices.oav.oav_detector import OAV -from mx_bluesky.i24.serial.fixed_target import i24ssx_Chip_Manager_py3v1 as manager -from mx_bluesky.i24.serial.fixed_target.ft_utils import Fiducials -from mx_bluesky.i24.serial.parameters.constants import OAV1_CAM +from mx_bluesky.beamlines.i24.serial.fixed_target import ( + i24ssx_Chip_Manager_py3v1 as manager, +) +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import Fiducials +from mx_bluesky.beamlines.i24.serial.parameters.constants import OAV1_CAM logger = logging.getLogger("I24ssx.moveonclick") diff --git a/src/mx_bluesky/i24/serial/log.py b/src/mx_bluesky/beamlines/i24/serial/log.py similarity index 100% rename from src/mx_bluesky/i24/serial/log.py rename to src/mx_bluesky/beamlines/i24/serial/log.py diff --git a/src/mx_bluesky/beamlines/i24/serial/parameters/__init__.py b/src/mx_bluesky/beamlines/i24/serial/parameters/__init__.py new file mode 100644 index 000000000..c1e507329 --- /dev/null +++ b/src/mx_bluesky/beamlines/i24/serial/parameters/__init__.py @@ -0,0 +1,15 @@ +from 
mx_bluesky.beamlines.i24.serial.parameters.constants import SSXType +from mx_bluesky.beamlines.i24.serial.parameters.experiment_parameters import ( + ChipDescription, + ExtruderParameters, + FixedTargetParameters, +) +from mx_bluesky.beamlines.i24.serial.parameters.utils import get_chip_format + +__all__ = [ + "SSXType", + "ExtruderParameters", + "ChipDescription", + "FixedTargetParameters", + "get_chip_format", +] diff --git a/src/mx_bluesky/i24/serial/parameters/constants.py b/src/mx_bluesky/beamlines/i24/serial/parameters/constants.py similarity index 94% rename from src/mx_bluesky/i24/serial/parameters/constants.py rename to src/mx_bluesky/beamlines/i24/serial/parameters/constants.py index 231260ab5..12cac6fce 100644 --- a/src/mx_bluesky/i24/serial/parameters/constants.py +++ b/src/mx_bluesky/beamlines/i24/serial/parameters/constants.py @@ -2,7 +2,7 @@ from os import environ from pathlib import Path -from mx_bluesky.i24.serial.log import _read_visit_directory_from_file +from mx_bluesky.beamlines.i24.serial.log import _read_visit_directory_from_file class SSXType(Enum): diff --git a/src/mx_bluesky/i24/serial/parameters/experiment_parameters.py b/src/mx_bluesky/beamlines/i24/serial/parameters/experiment_parameters.py similarity index 98% rename from src/mx_bluesky/i24/serial/parameters/experiment_parameters.py rename to src/mx_bluesky/beamlines/i24/serial/parameters/experiment_parameters.py index b418f8713..923b29afa 100644 --- a/src/mx_bluesky/i24/serial/parameters/experiment_parameters.py +++ b/src/mx_bluesky/beamlines/i24/serial/parameters/experiment_parameters.py @@ -4,7 +4,7 @@ from pydantic import BaseModel, ConfigDict, validator -from mx_bluesky.i24.serial.fixed_target.ft_utils import ( +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ( ChipType, MappingType, PumpProbeSetting, diff --git a/src/mx_bluesky/i24/serial/parameters/fixed_target/cs/cs_maker.json b/src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/cs/cs_maker.json similarity index 100% rename from src/mx_bluesky/i24/serial/parameters/fixed_target/cs/cs_maker.json rename to src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/cs/cs_maker.json diff --git a/src/mx_bluesky/i24/serial/parameters/fixed_target/cs/motor_direction.txt b/src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/cs/motor_direction.txt similarity index 100% rename from src/mx_bluesky/i24/serial/parameters/fixed_target/cs/motor_direction.txt rename to src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/cs/motor_direction.txt diff --git a/src/mx_bluesky/i24/serial/parameters/fixed_target/pvar_files/minichip-oxford.pvar b/src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/pvar_files/minichip-oxford.pvar similarity index 100% rename from src/mx_bluesky/i24/serial/parameters/fixed_target/pvar_files/minichip-oxford.pvar rename to src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/pvar_files/minichip-oxford.pvar diff --git a/src/mx_bluesky/i24/serial/parameters/fixed_target/pvar_files/oxford.pvar b/src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/pvar_files/oxford.pvar similarity index 100% rename from src/mx_bluesky/i24/serial/parameters/fixed_target/pvar_files/oxford.pvar rename to src/mx_bluesky/beamlines/i24/serial/parameters/fixed_target/pvar_files/oxford.pvar diff --git a/src/mx_bluesky/i24/serial/parameters/utils.py b/src/mx_bluesky/beamlines/i24/serial/parameters/utils.py similarity index 88% rename from src/mx_bluesky/i24/serial/parameters/utils.py rename to 
src/mx_bluesky/beamlines/i24/serial/parameters/utils.py index b1dd2ea0f..807784929 100644 --- a/src/mx_bluesky/i24/serial/parameters/utils.py +++ b/src/mx_bluesky/beamlines/i24/serial/parameters/utils.py @@ -1,8 +1,10 @@ from typing import Any -from mx_bluesky.i24.serial.fixed_target.ft_utils import ChipType -from mx_bluesky.i24.serial.parameters.experiment_parameters import ChipDescription -from mx_bluesky.i24.serial.setup_beamline import caget, pv +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ChipType +from mx_bluesky.beamlines.i24.serial.parameters.experiment_parameters import ( + ChipDescription, +) +from mx_bluesky.beamlines.i24.serial.setup_beamline import caget, pv def get_chip_format(chip_type: ChipType) -> ChipDescription: diff --git a/src/mx_bluesky/i24/serial/run_extruder.sh b/src/mx_bluesky/beamlines/i24/serial/run_extruder.sh similarity index 100% rename from src/mx_bluesky/i24/serial/run_extruder.sh rename to src/mx_bluesky/beamlines/i24/serial/run_extruder.sh diff --git a/src/mx_bluesky/i24/serial/run_fixed_target.sh b/src/mx_bluesky/beamlines/i24/serial/run_fixed_target.sh similarity index 100% rename from src/mx_bluesky/i24/serial/run_fixed_target.sh rename to src/mx_bluesky/beamlines/i24/serial/run_fixed_target.sh diff --git a/src/mx_bluesky/i24/serial/run_serial.py b/src/mx_bluesky/beamlines/i24/serial/run_serial.py similarity index 100% rename from src/mx_bluesky/i24/serial/run_serial.py rename to src/mx_bluesky/beamlines/i24/serial/run_serial.py diff --git a/src/mx_bluesky/i24/serial/run_ssx.sh b/src/mx_bluesky/beamlines/i24/serial/run_ssx.sh similarity index 100% rename from src/mx_bluesky/i24/serial/run_ssx.sh rename to src/mx_bluesky/beamlines/i24/serial/run_ssx.sh diff --git a/src/mx_bluesky/i24/serial/set_visit_directory.sh b/src/mx_bluesky/beamlines/i24/serial/set_visit_directory.sh similarity index 100% rename from src/mx_bluesky/i24/serial/set_visit_directory.sh rename to src/mx_bluesky/beamlines/i24/serial/set_visit_directory.sh diff --git a/src/mx_bluesky/i24/serial/setup_beamline/__init__.py b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/__init__.py similarity index 100% rename from src/mx_bluesky/i24/serial/setup_beamline/__init__.py rename to src/mx_bluesky/beamlines/i24/serial/setup_beamline/__init__.py diff --git a/src/mx_bluesky/i24/serial/setup_beamline/ca.py b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/ca.py similarity index 100% rename from src/mx_bluesky/i24/serial/setup_beamline/ca.py rename to src/mx_bluesky/beamlines/i24/serial/setup_beamline/ca.py diff --git a/src/mx_bluesky/i24/serial/setup_beamline/pv.py b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/pv.py similarity index 100% rename from src/mx_bluesky/i24/serial/setup_beamline/pv.py rename to src/mx_bluesky/beamlines/i24/serial/setup_beamline/pv.py diff --git a/src/mx_bluesky/i24/serial/setup_beamline/pv_abstract.py b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/pv_abstract.py similarity index 96% rename from src/mx_bluesky/i24/serial/setup_beamline/pv_abstract.py rename to src/mx_bluesky/beamlines/i24/serial/setup_beamline/pv_abstract.py index 852e13100..ca3816a55 100644 --- a/src/mx_bluesky/i24/serial/setup_beamline/pv_abstract.py +++ b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/pv_abstract.py @@ -5,7 +5,7 @@ abstract wrapper around them. 
""" -from mx_bluesky.i24.serial.setup_beamline import pv +from mx_bluesky.beamlines.i24.serial.setup_beamline import pv class Pilatus: diff --git a/src/mx_bluesky/i24/serial/setup_beamline/setup_beamline.py b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_beamline.py similarity index 99% rename from src/mx_bluesky/i24/serial/setup_beamline/setup_beamline.py rename to src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_beamline.py index 08b605c3d..11e5b59c0 100644 --- a/src/mx_bluesky/i24/serial/setup_beamline/setup_beamline.py +++ b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_beamline.py @@ -7,8 +7,8 @@ from dodal.devices.i24.dual_backlight import BacklightPositions, DualBacklight from dodal.devices.i24.i24_detector_motion import DetectorMotion -from mx_bluesky.i24.serial.setup_beamline import pv -from mx_bluesky.i24.serial.setup_beamline.ca import caget, caput +from mx_bluesky.beamlines.i24.serial.setup_beamline import pv +from mx_bluesky.beamlines.i24.serial.setup_beamline.ca import caget, caput logger = logging.getLogger("I24ssx.sup") diff --git a/src/mx_bluesky/i24/serial/setup_beamline/setup_detector.py b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_detector.py similarity index 90% rename from src/mx_bluesky/i24/serial/setup_beamline/setup_detector.py rename to src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_detector.py index 0dc60fa9c..df6323c6e 100644 --- a/src/mx_bluesky/i24/serial/setup_beamline/setup_detector.py +++ b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_detector.py @@ -13,11 +13,11 @@ from dodal.common import inject from dodal.devices.i24.i24_detector_motion import DetectorMotion -from mx_bluesky.i24.serial import log -from mx_bluesky.i24.serial.parameters import SSXType -from mx_bluesky.i24.serial.setup_beamline import pv -from mx_bluesky.i24.serial.setup_beamline.ca import caget -from mx_bluesky.i24.serial.setup_beamline.pv_abstract import ( +from mx_bluesky.beamlines.i24.serial import log +from mx_bluesky.beamlines.i24.serial.parameters import SSXType +from mx_bluesky.beamlines.i24.serial.setup_beamline import pv +from mx_bluesky.beamlines.i24.serial.setup_beamline.ca import caget +from mx_bluesky.beamlines.i24.serial.setup_beamline.pv_abstract import ( Detector, Eiger, Pilatus, diff --git a/src/mx_bluesky/i24/serial/setup_beamline/setup_zebra_plans.py b/src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_zebra_plans.py similarity index 100% rename from src/mx_bluesky/i24/serial/setup_beamline/setup_zebra_plans.py rename to src/mx_bluesky/beamlines/i24/serial/setup_beamline/setup_zebra_plans.py diff --git a/src/mx_bluesky/i24/serial/start_blueapi.sh b/src/mx_bluesky/beamlines/i24/serial/start_blueapi.sh similarity index 100% rename from src/mx_bluesky/i24/serial/start_blueapi.sh rename to src/mx_bluesky/beamlines/i24/serial/start_blueapi.sh diff --git a/src/mx_bluesky/i24/serial/write_nexus.py b/src/mx_bluesky/beamlines/i24/serial/write_nexus.py similarity index 93% rename from src/mx_bluesky/i24/serial/write_nexus.py rename to src/mx_bluesky/beamlines/i24/serial/write_nexus.py index 5539e69bb..384394125 100644 --- a/src/mx_bluesky/i24/serial/write_nexus.py +++ b/src/mx_bluesky/beamlines/i24/serial/write_nexus.py @@ -8,9 +8,12 @@ import requests -from mx_bluesky.i24.serial.fixed_target.ft_utils import ChipType, MappingType -from mx_bluesky.i24.serial.parameters import ExtruderParameters, FixedTargetParameters -from mx_bluesky.i24.serial.setup_beamline import Eiger, caget, cagetstring +from 
mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ChipType, MappingType +from mx_bluesky.beamlines.i24.serial.parameters import ( + ExtruderParameters, + FixedTargetParameters, +) +from mx_bluesky.beamlines.i24.serial.setup_beamline import Eiger, caget, cagetstring logger = logging.getLogger("I24ssx.nexus_writer") diff --git a/src/mx_bluesky/hyperion/__init__.py b/src/mx_bluesky/hyperion/__init__.py new file mode 100644 index 000000000..f33698d1f --- /dev/null +++ b/src/mx_bluesky/hyperion/__init__.py @@ -0,0 +1 @@ +# placeholder file to start layout diff --git a/src/mx_bluesky/hyperion/__main__.py b/src/mx_bluesky/hyperion/__main__.py new file mode 100755 index 000000000..dae8ffc3d --- /dev/null +++ b/src/mx_bluesky/hyperion/__main__.py @@ -0,0 +1,368 @@ +import atexit +import json +import threading +from collections.abc import Callable +from dataclasses import asdict +from queue import Queue +from traceback import format_exception +from typing import Any + +from blueapi.core import BlueskyContext, MsgGenerator +from bluesky.callbacks.zmq import Publisher +from bluesky.run_engine import RunEngine +from flask import Flask, request +from flask_restful import Api, Resource +from pydantic.dataclasses import dataclass + +from mx_bluesky.hyperion.exceptions import WarningException +from mx_bluesky.hyperion.experiment_plans.experiment_registry import ( + PLAN_REGISTRY, + PlanNotFound, +) +from mx_bluesky.hyperion.external_interaction.callbacks.__main__ import ( + setup_logging as setup_callback_logging, +) +from mx_bluesky.hyperion.external_interaction.callbacks.aperture_change_callback import ( + ApertureChangeCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import ( + CallbacksFactory, +) +from mx_bluesky.hyperion.external_interaction.callbacks.log_uid_tag_callback import ( + LogUidTaggingCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.logging_callback import ( + VerbosePlanExecutionLoggingCallback, +) +from mx_bluesky.hyperion.log import ( + LOGGER, + do_default_logging_setup, + flush_debug_handler, +) +from mx_bluesky.hyperion.parameters.cli import parse_cli_args +from mx_bluesky.hyperion.parameters.components import HyperionParameters +from mx_bluesky.hyperion.parameters.constants import CONST, Actions, Status +from mx_bluesky.hyperion.tracing import TRACER +from mx_bluesky.hyperion.utils.context import setup_context + +VERBOSE_EVENT_LOGGING: bool | None = None + + +@dataclass +class Command: + action: Actions + devices: Any | None = None + experiment: Callable[[Any, Any], MsgGenerator] | None = None + parameters: HyperionParameters | None = None + callbacks: CallbacksFactory | None = None + + +@dataclass +class StatusAndMessage: + status: str + message: str = "" + + def __init__(self, status: Status, message: str = "") -> None: + self.status = status.value + self.message = message + + +@dataclass +class ErrorStatusAndMessage(StatusAndMessage): + exception_type: str = "" + + def __init__(self, exception: Exception) -> None: + super().__init__(Status.FAILED, repr(exception)) + self.exception_type = type(exception).__name__ + + +class BlueskyRunner: + def __init__( + self, + RE: RunEngine, + context: BlueskyContext, + skip_startup_connection=False, + use_external_callbacks: bool = False, + ) -> None: + self.command_queue: Queue[Command] = Queue() + self.current_status: StatusAndMessage = StatusAndMessage(Status.IDLE) + self.last_run_aborted: bool = False + self.aperture_change_callback = ApertureChangeCallback() + 
self.logging_uid_tag_callback = LogUidTaggingCallback() + self.context: BlueskyContext + + self.RE = RE + self.context = context + self.subscribed_per_plan_callbacks: list[int] = [] + RE.subscribe(self.aperture_change_callback) + RE.subscribe(self.logging_uid_tag_callback) + + self.use_external_callbacks = use_external_callbacks + if self.use_external_callbacks: + LOGGER.info("Connecting to external callback ZMQ proxy...") + self.publisher = Publisher(f"localhost:{CONST.CALLBACK_0MQ_PROXY_PORTS[0]}") + RE.subscribe(self.publisher) + + if VERBOSE_EVENT_LOGGING: + RE.subscribe(VerbosePlanExecutionLoggingCallback()) + + self.skip_startup_connection = skip_startup_connection + if not self.skip_startup_connection: + LOGGER.info("Initialising dodal devices...") + for plan_name in PLAN_REGISTRY: + PLAN_REGISTRY[plan_name]["setup"](context) + + def start( + self, + experiment: Callable, + parameters: HyperionParameters, + plan_name: str, + callbacks: CallbacksFactory | None, + ) -> StatusAndMessage: + LOGGER.info(f"Started with parameters: {parameters.json(indent=2)}") + + devices: Any = PLAN_REGISTRY[plan_name]["setup"](self.context) + + if ( + self.current_status.status == Status.BUSY.value + or self.current_status.status == Status.ABORTING.value + ): + return StatusAndMessage(Status.FAILED, "Bluesky already running") + else: + self.current_status = StatusAndMessage(Status.BUSY) + self.command_queue.put( + Command( + action=Actions.START, + devices=devices, + experiment=experiment, + parameters=parameters, + callbacks=callbacks, + ) + ) + return StatusAndMessage(Status.SUCCESS) + + def stopping_thread(self): + try: + self.RE.abort() + self.current_status = StatusAndMessage(Status.IDLE) + except Exception as e: + self.current_status = ErrorStatusAndMessage(e) + + def stop(self) -> StatusAndMessage: + if self.current_status.status == Status.IDLE.value: + return StatusAndMessage(Status.FAILED, "Bluesky not running") + elif self.current_status.status == Status.ABORTING.value: + return StatusAndMessage(Status.FAILED, "Bluesky already stopping") + else: + self.current_status = StatusAndMessage(Status.ABORTING) + stopping_thread = threading.Thread(target=self.stopping_thread) + stopping_thread.start() + self.last_run_aborted = True + return StatusAndMessage(Status.ABORTING) + + def shutdown(self): + """Stops the run engine and the loop waiting for messages.""" + print("Shutting down: Stopping the run engine gracefully") + self.stop() + self.command_queue.put(Command(action=Actions.SHUTDOWN)) + + def wait_on_queue(self): + while True: + command = self.command_queue.get() + if command.action == Actions.SHUTDOWN: + return + elif command.action == Actions.START: + if command.experiment is None: + raise ValueError("No experiment provided for START") + try: + if ( + not self.use_external_callbacks + and command.callbacks + and (cbs := command.callbacks()) + ): + LOGGER.info( + f"Using callbacks for this plan: {not self.use_external_callbacks} - {cbs}" + ) + self.subscribed_per_plan_callbacks += [ + self.RE.subscribe(cb) for cb in cbs + ] + with TRACER.start_span("do_run"): + self.RE(command.experiment(command.devices, command.parameters)) + + self.current_status = StatusAndMessage( + Status.IDLE, + self.aperture_change_callback.last_selected_aperture, + ) + + self.last_run_aborted = False + except WarningException as exception: + LOGGER.warning("Warning Exception", exc_info=True) + self.current_status = ErrorStatusAndMessage(exception) + except Exception as exception: + LOGGER.error("Exception on running 
plan", exc_info=True) + + if self.last_run_aborted: + # Aborting will cause an exception here that we want to swallow + self.last_run_aborted = False + else: + self.current_status = ErrorStatusAndMessage(exception) + finally: + [ + self.RE.unsubscribe(cb) + for cb in self.subscribed_per_plan_callbacks + ] + + +def compose_start_args(context: BlueskyContext, plan_name: str, action: Actions): + experiment_registry_entry = PLAN_REGISTRY.get(plan_name) + if experiment_registry_entry is None: + raise PlanNotFound(f"Experiment plan '{plan_name}' not found in registry.") + + experiment_internal_param_type = experiment_registry_entry.get("param_type") + callback_type = experiment_registry_entry.get("callback_collection_type") + plan = context.plan_functions.get(plan_name) + if experiment_internal_param_type is None: + raise PlanNotFound( + f"Corresponding internal param type for '{plan_name}' not found in registry." + ) + if plan is None: + raise PlanNotFound( + f"Experiment plan '{plan_name}' not found in context. Context has {context.plan_functions.keys()}" + ) + try: + parameters = experiment_internal_param_type(**json.loads(request.data)) + except Exception as e: + raise ValueError( + f"Supplied parameters don't match the plan for this endpoint {request.data}" + ) from e + return plan, parameters, plan_name, callback_type + + +class RunExperiment(Resource): + def __init__(self, runner: BlueskyRunner, context: BlueskyContext) -> None: + super().__init__() + self.runner = runner + self.context = context + + def put(self, plan_name: str, action: Actions): + status_and_message = StatusAndMessage(Status.FAILED, f"{action} not understood") + if action == Actions.START.value: + try: + plan, params, plan_name, callback_type = compose_start_args( + self.context, plan_name, action + ) + status_and_message = self.runner.start( + plan, params, plan_name, callback_type + ) + except Exception as e: + status_and_message = ErrorStatusAndMessage(e) + LOGGER.error(format_exception(e)) + + elif action == Actions.STOP.value: + status_and_message = self.runner.stop() + # no idea why mypy gives an attribute error here but nowhere else for this + # exact same situation... 
+ return asdict(status_and_message) # type: ignore + + +class StopOrStatus(Resource): + def __init__(self, runner: BlueskyRunner) -> None: + super().__init__() + self.runner: BlueskyRunner = runner + + def put(self, action): + status_and_message = StatusAndMessage(Status.FAILED, f"{action} not understood") + if action == Actions.STOP.value: + status_and_message = self.runner.stop() + return asdict(status_and_message) + + def get(self, **kwargs): + action = kwargs.get("action") + status_and_message = StatusAndMessage(Status.FAILED, f"{action} not understood") + if action == Actions.STATUS.value: + LOGGER.debug( + f"Runner received status request - state of the runner object is: {self.runner.__dict__} - state of the RE is: {self.runner.RE.__dict__}" + ) + status_and_message = self.runner.current_status + return asdict(status_and_message) + + +class FlushLogs(Resource): + def put(self, **kwargs): + try: + status_and_message = StatusAndMessage( + Status.SUCCESS, f"Flushed debug log to {flush_debug_handler()}" + ) + except Exception as e: + status_and_message = StatusAndMessage( + Status.FAILED, f"Failed to flush debug log: {e}" + ) + return asdict(status_and_message) + + +def create_app( + test_config=None, + RE: RunEngine = RunEngine({}), + skip_startup_connection: bool = False, + use_external_callbacks: bool = False, +) -> tuple[Flask, BlueskyRunner]: + context = setup_context( + wait_for_connection=not skip_startup_connection, + ) + runner = BlueskyRunner( + RE, + context=context, + use_external_callbacks=use_external_callbacks, + skip_startup_connection=skip_startup_connection, + ) + app = Flask(__name__) + if test_config: + app.config.update(test_config) + api = Api(app) + api.add_resource( + RunExperiment, + "/<string:plan_name>/<string:action>", + resource_class_args=[runner, context], + ) + api.add_resource( + FlushLogs, + "/flush_debug_log", + ) + api.add_resource( + StopOrStatus, + "/<string:action>", + resource_class_args=[runner], + ) + return app, runner + + +def create_targets(): + hyperion_port = 5005 + args = parse_cli_args() + do_default_logging_setup(dev_mode=args.dev_mode) + if not args.use_external_callbacks: + setup_callback_logging(args.dev_mode) + app, runner = create_app( + skip_startup_connection=args.skip_startup_connection, + use_external_callbacks=args.use_external_callbacks, + ) + return app, runner, hyperion_port, args.dev_mode + + +def main(): + app, runner, port, dev_mode = create_targets() + atexit.register(runner.shutdown) + flask_thread = threading.Thread( + target=lambda: app.run( + host="0.0.0.0", port=port, debug=True, use_reloader=False + ), + daemon=True, + ) + flask_thread.start() + LOGGER.info(f"Hyperion now listening on {port} ({'IN DEV' if dev_mode else ''})") + runner.wait_on_queue() + flask_thread.join() + + +if __name__ == "__main__": + main() diff --git a/tests/i24/serial/extruder/__init__.py b/src/mx_bluesky/hyperion/device_setup_plans/__init__.py similarity index 100% rename from tests/i24/serial/extruder/__init__.py rename to src/mx_bluesky/hyperion/device_setup_plans/__init__.py diff --git a/src/mx_bluesky/hyperion/device_setup_plans/dcm_pitch_roll_mirror_adjuster.py b/src/mx_bluesky/hyperion/device_setup_plans/dcm_pitch_roll_mirror_adjuster.py new file mode 100644 index 000000000..1b9a9c17a --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/dcm_pitch_roll_mirror_adjuster.py @@ -0,0 +1,134 @@ +import json + +import bluesky.plan_stubs as bps +from dodal.devices.focusing_mirror import ( + FocusingMirrorWithStripes, + MirrorStripe, + VFMMirrorVoltages, +) +from 
dodal.devices.undulator_dcm import UndulatorDCM +from dodal.devices.util.adjuster_plans import lookup_table_adjuster +from dodal.devices.util.lookup_tables import ( + linear_interpolation_lut, +) + +from mx_bluesky.hyperion.log import LOGGER + +MIRROR_VOLTAGE_GROUP = "MIRROR_VOLTAGE_GROUP" +DCM_GROUP = "DCM_GROUP" + + +def _apply_and_wait_for_voltages_to_settle( + stripe: MirrorStripe, + mirror: FocusingMirrorWithStripes, + mirror_voltages: VFMMirrorVoltages, +): + with open(mirror_voltages.voltage_lookup_table_path) as lut_file: + json_obj = json.load(lut_file) + + # sample mode is the only mode supported + sample_data = json_obj["sample"] + mirror_key = mirror.name.lower() + if stripe == MirrorStripe.BARE: + stripe_key = "bare" + elif stripe == MirrorStripe.RHODIUM: + stripe_key = "rh" + elif stripe == MirrorStripe.PLATINUM: + stripe_key = "pt" + else: + raise ValueError(f"Unsupported stripe '{stripe}'") + + required_voltages = sample_data[stripe_key][mirror_key] + for voltage_channel, required_voltage in zip( + mirror_voltages.voltage_channels.values(), required_voltages, strict=False + ): + LOGGER.debug( + f"Applying and waiting for voltage {voltage_channel.name} = {required_voltage}" + ) + yield from bps.abs_set( + voltage_channel, required_voltage, group=MIRROR_VOLTAGE_GROUP + ) + + yield from bps.wait(group=MIRROR_VOLTAGE_GROUP) + + +def adjust_mirror_stripe( + energy_kev, mirror: FocusingMirrorWithStripes, mirror_voltages: VFMMirrorVoltages +): + """Feedback should be OFF prior to entry, in order to prevent + feedback from making unnecessary corrections while beam is being adjusted.""" + stripe = mirror.energy_to_stripe(energy_kev) + + LOGGER.info( + f"Adjusting mirror stripe for {energy_kev}keV selecting {stripe} stripe" + ) + yield from bps.abs_set(mirror.stripe, stripe, wait=True) + yield from bps.trigger(mirror.apply_stripe) + + LOGGER.info("Adjusting mirror voltages...") + yield from _apply_and_wait_for_voltages_to_settle(stripe, mirror, mirror_voltages) + + +def adjust_dcm_pitch_roll_vfm_from_lut( + undulator_dcm: UndulatorDCM, + vfm: FocusingMirrorWithStripes, + vfm_mirror_voltages: VFMMirrorVoltages, + energy_kev, +): + """Beamline energy-change post-adjustments : Adjust DCM and VFM directly from lookup tables. + Lookups are performed against the Bragg angle which will have been automatically set by EPICS as a side-effect of the + energy change prior to calling this function. 
+ Feedback should be OFF prior to entry, in order to prevent + feedback from making unnecessary corrections while beam is being adjusted.""" + + # DCM Pitch + dcm = undulator_dcm.dcm + LOGGER.info(f"Adjusting DCM and VFM for {energy_kev} keV") + bragg_deg = yield from bps.rd(dcm.bragg_in_degrees.user_readback) + LOGGER.info(f"Read Bragg angle = {bragg_deg} degrees") + dcm_pitch_adjuster = lookup_table_adjuster( + linear_interpolation_lut(undulator_dcm.dcm_pitch_converter_lookup_table_path), + dcm.pitch_in_mrad, + bragg_deg, + ) + yield from dcm_pitch_adjuster(DCM_GROUP) + # It's possible we can remove these waits but we need to check + LOGGER.info("Waiting for DCM pitch adjust to complete...") + + # DCM Roll + dcm_roll_adjuster = lookup_table_adjuster( + linear_interpolation_lut(undulator_dcm.dcm_roll_converter_lookup_table_path), + dcm.roll_in_mrad, + bragg_deg, + ) + yield from dcm_roll_adjuster(DCM_GROUP) + LOGGER.info("Waiting for DCM roll adjust to complete...") + + # DCM Perp pitch + offset_mm = undulator_dcm.dcm_fixed_offset_mm + LOGGER.info(f"Adjusting DCM offset to {offset_mm} mm") + yield from bps.abs_set(dcm.offset_in_mm, offset_mm, group=DCM_GROUP) + + # + # Adjust mirrors + # + + # No need to change HFM + + # Assumption is focus mode is already set to "sample" + # not sure how we check this + + # VFM Stripe selection + yield from adjust_mirror_stripe(energy_kev, vfm, vfm_mirror_voltages) + yield from bps.wait(DCM_GROUP) + + # VFM Adjust - for I03 this table always returns the same value + vfm_lut = vfm.bragg_to_lat_lookup_table_path + assert vfm_lut is not None + vfm_x_adjuster = lookup_table_adjuster( + linear_interpolation_lut(vfm_lut), + vfm.x_mm, + bragg_deg, + ) + LOGGER.info("Waiting for VFM Lat (Horizontal Translation) to complete...") + yield from vfm_x_adjuster() diff --git a/src/mx_bluesky/hyperion/device_setup_plans/manipulate_sample.py b/src/mx_bluesky/hyperion/device_setup_plans/manipulate_sample.py new file mode 100644 index 000000000..e0d3177e8 --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/manipulate_sample.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +import bluesky.plan_stubs as bps +from dodal.devices.aperturescatterguard import ( + AperturePositionGDANames, + ApertureScatterguard, +) +from dodal.devices.attenuator import Attenuator +from dodal.devices.backlight import Backlight, BacklightPosition +from dodal.devices.detector.detector_motion import DetectorMotion +from dodal.devices.smargon import Smargon + +from mx_bluesky.hyperion.log import LOGGER + +LOWER_DETECTOR_SHUTTER_AFTER_SCAN = True + + +def begin_sample_environment_setup( + detector_motion: DetectorMotion, + attenuator: Attenuator, + transmission_fraction: float, + detector_distance: float, + group="setup_senv", +): + """Start all sample environment changes that can be initiated before OAV snapshots are taken""" + yield from bps.abs_set(detector_motion.shutter, 1, group=group) + yield from bps.abs_set(detector_motion.z, detector_distance, group=group) + yield from bps.abs_set(attenuator, transmission_fraction, group=group) + + +def setup_sample_environment( + aperture_scatterguard: ApertureScatterguard, + aperture_position_gda_name: AperturePositionGDANames | None, + backlight: Backlight, + group="setup_senv", +): + """Move the aperture into required position, move out the backlight.""" + + yield from move_aperture_if_required( + aperture_scatterguard, aperture_position_gda_name, group=group + ) + yield from bps.abs_set(backlight, BacklightPosition.OUT, group=group) 
+ + +def move_aperture_if_required( + aperture_scatterguard: ApertureScatterguard, + aperture_position_gda_name: AperturePositionGDANames | None, + group="move_aperture", +): + if not aperture_position_gda_name: + previous_aperture_position = yield from bps.rd(aperture_scatterguard) + assert isinstance(previous_aperture_position, dict) + LOGGER.info( + f"Using previously set aperture position {previous_aperture_position['name']}" + ) + + else: + aperture_position = aperture_scatterguard.get_position_from_gda_aperture_name( + aperture_position_gda_name + ) + LOGGER.info(f"Setting aperture position to {aperture_position}") + yield from bps.abs_set( + aperture_scatterguard, + aperture_position, + group=group, + ) + + +def cleanup_sample_environment( + detector_motion: DetectorMotion, + group="cleanup_senv", +): + """Put the detector shutter back down""" + + yield from bps.abs_set( + detector_motion.shutter, + int(not LOWER_DETECTOR_SHUTTER_AFTER_SCAN), + group=group, + ) + + +def move_x_y_z( + smargon: Smargon, + x_mm: float | None = None, + y_mm: float | None = None, + z_mm: float | None = None, + wait=False, + group="move_x_y_z", +): + """Move the x, y, and z axes of the given smargon to the specified position. All + axes are optional.""" + + LOGGER.info(f"Moving smargon to x, y, z: {(x_mm, y_mm, z_mm)}") + if x_mm: + yield from bps.abs_set(smargon.x, x_mm, group=group) + if y_mm: + yield from bps.abs_set(smargon.y, y_mm, group=group) + if z_mm: + yield from bps.abs_set(smargon.z, z_mm, group=group) + if wait: + yield from bps.wait(group) + + +def move_phi_chi_omega( + smargon: Smargon, + phi: float | None = None, + chi: float | None = None, + omega: float | None = None, + wait=False, + group="move_phi_chi_omega", +): + """Move the phi, chi, and omega axes of the given smargon to the specified position. 
All + axes are optional.""" + + LOGGER.info(f"Moving smargon to phi, chi, omega: {(phi, chi, omega)}") + if phi: + yield from bps.abs_set(smargon.phi, phi, group=group) + if chi: + yield from bps.abs_set(smargon.chi, chi, group=group) + if omega: + yield from bps.abs_set(smargon.omega, omega, group=group) + if wait: + yield from bps.wait(group) diff --git a/src/mx_bluesky/hyperion/device_setup_plans/position_detector.py b/src/mx_bluesky/hyperion/device_setup_plans/position_detector.py new file mode 100644 index 000000000..a21bbfd62 --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/position_detector.py @@ -0,0 +1,16 @@ +from bluesky import plan_stubs as bps +from dodal.devices.detector.detector_motion import DetectorMotion, ShutterState + +from mx_bluesky.hyperion.log import LOGGER + + +def set_detector_z_position( + detector_motion: DetectorMotion, detector_position: float, group=None +): + LOGGER.info(f"Moving detector to {detector_position} ({group})") + yield from bps.abs_set(detector_motion.z, detector_position, group=group) + + +def set_shutter(detector_motion: DetectorMotion, state: ShutterState, group=None): + LOGGER.info(f"Setting shutter to {state} ({group})") + yield from bps.abs_set(detector_motion.shutter, state, group=group) diff --git a/src/mx_bluesky/hyperion/device_setup_plans/read_hardware_for_setup.py b/src/mx_bluesky/hyperion/device_setup_plans/read_hardware_for_setup.py new file mode 100644 index 000000000..60b9230a2 --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/read_hardware_for_setup.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import bluesky.plan_stubs as bps +from dodal.devices.aperturescatterguard import ApertureScatterguard +from dodal.devices.attenuator import Attenuator +from dodal.devices.dcm import DCM +from dodal.devices.eiger import EigerDetector +from dodal.devices.flux import Flux +from dodal.devices.robot import BartRobot +from dodal.devices.s4_slit_gaps import S4SlitGaps +from dodal.devices.smargon import Smargon +from dodal.devices.synchrotron import Synchrotron +from dodal.devices.undulator import Undulator + +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST + + +def read_hardware_pre_collection( + undulator: Undulator, + synchrotron: Synchrotron, + s4_slit_gaps: S4SlitGaps, + robot: BartRobot, + smargon: Smargon, +): + LOGGER.info("Reading status of beamline for callbacks, pre collection.") + yield from bps.create( + name=CONST.DESCRIPTORS.HARDWARE_READ_PRE + ) # gives name to event *descriptor* document + yield from bps.read(undulator.current_gap) + yield from bps.read(synchrotron.synchrotron_mode) + yield from bps.read(s4_slit_gaps.xgap) + yield from bps.read(s4_slit_gaps.ygap) + yield from bps.read(smargon.x) + yield from bps.read(smargon.y) + yield from bps.read(smargon.z) + yield from bps.save() + + +def read_hardware_during_collection( + aperture_scatterguard: ApertureScatterguard, + attenuator: Attenuator, + flux: Flux, + dcm: DCM, + detector: EigerDetector, +): + LOGGER.info("Reading status of beamline for callbacks, during collection.") + yield from bps.create(name=CONST.DESCRIPTORS.HARDWARE_READ_DURING) + yield from bps.read(aperture_scatterguard) + yield from bps.read(attenuator.actual_transmission) + yield from bps.read(flux.flux_reading) + yield from bps.read(dcm.energy_in_kev) + yield from bps.read(detector.bit_depth) + yield from bps.save() + + +def read_hardware_for_zocalo(detector: EigerDetector): + yield from 
bps.create(name=CONST.DESCRIPTORS.ZOCALO_HW_READ) + yield from bps.read(detector.odin.file_writer.id) + yield from bps.save() diff --git a/src/mx_bluesky/hyperion/device_setup_plans/setup_oav.py b/src/mx_bluesky/hyperion/device_setup_plans/setup_oav.py new file mode 100644 index 000000000..a1cee8c90 --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/setup_oav.py @@ -0,0 +1,90 @@ +from functools import partial + +import bluesky.plan_stubs as bps +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.oav.oav_errors import OAVError_ZoomLevelNotFound +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.oav.pin_image_recognition import PinTipDetection +from dodal.devices.oav.utils import ColorMode + +oav_group = "oav_setup" +# Helper function to make sure we set the waiting groups correctly +set_using_group = partial(bps.abs_set, group=oav_group) + + +def setup_pin_tip_detection_params( + pin_tip_detect_device: PinTipDetection, parameters: OAVParameters +): + # select which blur to apply to image + yield from set_using_group( + pin_tip_detect_device.preprocess_operation, parameters.preprocess + ) + + # sets length scale for blurring + yield from set_using_group( + pin_tip_detect_device.preprocess_ksize, parameters.preprocess_K_size + ) + + # Canny edge detect - lower + yield from set_using_group( + pin_tip_detect_device.canny_lower_threshold, + parameters.canny_edge_lower_threshold, + ) + + # Canny edge detect - upper + yield from set_using_group( + pin_tip_detect_device.canny_upper_threshold, + parameters.canny_edge_upper_threshold, + ) + + # "Close" morphological operation + yield from set_using_group( + pin_tip_detect_device.close_ksize, parameters.close_ksize + ) + + # Sample detection direction + yield from set_using_group( + pin_tip_detect_device.scan_direction, parameters.direction + ) + + # Minimum height + yield from set_using_group( + pin_tip_detect_device.min_tip_height, + parameters.minimum_height, + ) + + +def setup_general_oav_params(oav: OAV, parameters: OAVParameters): + yield from set_using_group(oav.cam.color_mode, ColorMode.RGB1) + yield from set_using_group(oav.cam.acquire_period, parameters.acquire_period) + yield from set_using_group(oav.cam.acquire_time, parameters.exposure) + yield from set_using_group(oav.cam.gain, parameters.gain) + + zoom_level_str = f"{float(parameters.zoom)}x" + if zoom_level_str not in oav.zoom_controller.allowed_zoom_levels: + raise OAVError_ZoomLevelNotFound( + f"Found {zoom_level_str} as a zoom level but expected one of {oav.zoom_controller.allowed_zoom_levels}" + ) + + yield from bps.abs_set( + oav.zoom_controller, + zoom_level_str, + wait=True, + ) + + +def pre_centring_setup_oav( + oav: OAV, + parameters: OAVParameters, + pin_tip_detection_device: PinTipDetection, +): + """ + Setup OAV PVs with required values. + """ + yield from setup_general_oav_params(oav, parameters) + yield from setup_pin_tip_detection_params(pin_tip_detection_device, parameters) + yield from bps.wait(oav_group) + + """ + TODO: We require setting the backlight brightness to that in the json, we can't do this currently without a PV. 
+ """ diff --git a/src/mx_bluesky/hyperion/device_setup_plans/setup_panda.py b/src/mx_bluesky/hyperion/device_setup_plans/setup_panda.py new file mode 100644 index 000000000..038301440 --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/setup_panda.py @@ -0,0 +1,217 @@ +from datetime import datetime +from enum import Enum +from importlib import resources +from pathlib import Path + +import bluesky.plan_stubs as bps +from blueapi.core import MsgGenerator +from dodal.common.beamlines.beamline_utils import get_directory_provider +from dodal.devices.fast_grid_scan import PandAGridScanParams +from ophyd_async.core import load_device +from ophyd_async.panda import ( + HDFPanda, + SeqTable, + SeqTableRow, + SeqTrigger, + seq_table_from_rows, +) + +import mx_bluesky.hyperion.resources.panda as panda_resource +from mx_bluesky.hyperion.log import LOGGER + +MM_TO_ENCODER_COUNTS = 200000 +GENERAL_TIMEOUT = 60 +TICKS_PER_MS = 1000 # Panda sequencer prescaler will be set to us + + +class Enabled(Enum): + ENABLED = "ONE" + DISABLED = "ZERO" + + +class PcapArm(Enum): + ARMED = "Arm" + DISARMED = "Disarm" + + +def _get_seq_table( + parameters: PandAGridScanParams, exposure_distance_mm, time_between_steps_ms +) -> SeqTable: + """ + Generate the sequencer table for the panda. + + - Sending a 'trigger' means trigger PCAP internally and send signal to Eiger via physical panda output + + SEQUENCER TABLE: + + 1. Wait for physical trigger from motion script to mark start of scan / change of direction + 2. Wait for POSA (X2) to be greater than X_START and send x_steps triggers every time_between_steps_ms + 3. Wait for physical trigger from motion script to mark change of direction + 4. Wait for POSA (X2) to be less than X_START + X_STEP_SIZE * x_steps + exposure distance, then + send x_steps triggers every time_between_steps_ms + 5. Go back to step one. + + For a more detailed explanation and a diagram, see https://github.com/DiamondLightSource/hyperion/wiki/PandA-constant%E2%80%90motion-scanning + + For documentation on Panda itself, see https://pandablocks.github.io/PandABlocks-FPGA/master/index.html + + Args: + exposure_distance_mm: The distance travelled by the sample each time the detector is exposed: exposure time * sample velocity + time_between_steps_ms: The time taken to traverse between each grid step. 
+ parameters: Parameters for the panda gridscan + + Returns: + An instance of SeqTable describing the panda sequencer table + """ + + start_of_grid_x_counts = int(parameters.x_start * MM_TO_ENCODER_COUNTS) + + # x_start is the first trigger point, so we need to travel to x_steps-1 for the final trigger point + end_of_grid_x_counts = int( + start_of_grid_x_counts + + (parameters.x_step_size * (parameters.x_steps - 1) * MM_TO_ENCODER_COUNTS) + ) + + exposure_distance_x_counts = int(exposure_distance_mm * MM_TO_ENCODER_COUNTS) + + num_pulses = parameters.x_steps + + delay_between_pulses = time_between_steps_ms * TICKS_PER_MS + + PULSE_WIDTH_US = 1 + + assert delay_between_pulses > PULSE_WIDTH_US + + # BITA_1 trigger wired from TTLIN1, this is the trigger input + + # +ve direction scan + rows = [SeqTableRow(trigger=SeqTrigger.BITA_1, time2=1)] + + rows.append( + SeqTableRow( + repeats=num_pulses, + trigger=SeqTrigger.POSA_GT, + position=start_of_grid_x_counts, + time1=PULSE_WIDTH_US, + outa1=True, + time2=delay_between_pulses - PULSE_WIDTH_US, + outa2=False, + ) + ) + + # -ve direction scan + rows.append(SeqTableRow(trigger=SeqTrigger.BITA_1, time2=1)) + + rows.append( + SeqTableRow( + repeats=num_pulses, + trigger=SeqTrigger.POSA_LT, + position=end_of_grid_x_counts + exposure_distance_x_counts, + time1=PULSE_WIDTH_US, + outa1=True, + time2=delay_between_pulses - PULSE_WIDTH_US, + outa2=False, + ) + ) + + table = seq_table_from_rows(*rows) + + return table + + +def setup_panda_for_flyscan( + panda: HDFPanda, + parameters: PandAGridScanParams, + initial_x: float, + exposure_time_s: float, + time_between_x_steps_ms: float, + sample_velocity_mm_per_s: float, +) -> MsgGenerator: + """Configures the PandA device for a flyscan. + Sets PVs from a yaml file, calibrates the encoder, and + adjusts the sequencer table based off the grid parameters. Yaml file can be + created using ophyd_async.core.save_device() + + Args: + panda (HDFPanda): The PandA Ophyd device + parameters (PandAGridScanParams): Grid parameters + initial_x (float): Motor positions at time of PandA setup + exposure_time_s (float): Detector exposure time per trigger + time_between_x_steps_ms (float): Time, in ms, between each trigger. 
Equal to deadtime + exposure time + sample_velocity_mm_per_s (float): Velocity of the sample in mm/s = x_step_size_mm * 1000 / + time_between_x_steps_ms + Returns: + MsgGenerator + + Yields: + Iterator[MsgGenerator] + """ + assert parameters.x_steps > 0 + assert time_between_x_steps_ms * 1000 >= exposure_time_s + assert sample_velocity_mm_per_s * exposure_time_s < parameters.x_step_size + + yield from bps.stage(panda, group="panda-config") + + with resources.as_file( + resources.files(panda_resource) / "panda-gridscan.yaml" + ) as config_yaml_path: + yield from load_device(panda, str(config_yaml_path)) + + # Home the PandA X encoder using current motor position + yield from bps.abs_set( + panda.inenc[1].setp, # type: ignore + initial_x * MM_TO_ENCODER_COUNTS, + wait=True, + ) + + yield from bps.abs_set(panda.pulse[1].width, exposure_time_s, group="panda-config") + + exposure_distance_mm = sample_velocity_mm_per_s * exposure_time_s + + table = _get_seq_table(parameters, exposure_distance_mm, time_between_x_steps_ms) + + yield from bps.abs_set(panda.seq[1].table, table, group="panda-config") + + yield from bps.abs_set( + panda.pcap.enable, # type: ignore + Enabled.ENABLED.value, + group="panda-config", + ) + + # Values need to be set before blocks are enabled, so wait here + yield from bps.wait(group="panda-config", timeout=GENERAL_TIMEOUT) + + LOGGER.info(f"PandA sequencer table has been set to: {str(table)}") + table_readback = yield from bps.rd(panda.seq[1].table) + LOGGER.debug(f"PandA sequencer table readback is: {str(table_readback)}") + + yield from arm_panda_for_gridscan(panda) + + +def arm_panda_for_gridscan(panda: HDFPanda, group="arm_panda_gridscan"): + yield from bps.abs_set(panda.seq[1].enable, Enabled.ENABLED.value, group=group) # type: ignore + yield from bps.abs_set(panda.pulse[1].enable, Enabled.ENABLED.value, group=group) # type: ignore + yield from bps.abs_set(panda.counter[1].enable, Enabled.ENABLED.value, group=group) # type: ignore + yield from bps.abs_set(panda.pcap.arm, PcapArm.ARMED.value, group=group) # type: ignore + yield from bps.wait(group=group, timeout=GENERAL_TIMEOUT) + LOGGER.info("PandA has been armed") + + +def disarm_panda_for_gridscan(panda, group="disarm_panda_gridscan") -> MsgGenerator: + yield from bps.abs_set(panda.pcap.arm, PcapArm.DISARMED.value, group=group) # type: ignore + yield from bps.abs_set(panda.counter[1].enable, Enabled.DISABLED.value, group=group) # type: ignore + yield from bps.abs_set(panda.seq[1].enable, Enabled.DISABLED.value, group=group) + yield from bps.abs_set(panda.pulse[1].enable, Enabled.DISABLED.value, group=group) + yield from bps.abs_set(panda.pcap.enable, Enabled.DISABLED.value, group=group) # type: ignore + yield from bps.wait(group=group, timeout=GENERAL_TIMEOUT) + + +def set_panda_directory(panda_directory: Path) -> MsgGenerator: + """Updates the root folder which is used by the PandA's PCAP.""" + + suffix = datetime.now().strftime("_%Y%m%d%H%M%S") + + async def set_panda_dir(): + await get_directory_provider().update(directory=panda_directory, suffix=suffix) + + yield from bps.wait_for([set_panda_dir]) diff --git a/src/mx_bluesky/hyperion/device_setup_plans/setup_zebra.py b/src/mx_bluesky/hyperion/device_setup_plans/setup_zebra.py new file mode 100644 index 000000000..c62950d1a --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/setup_zebra.py @@ -0,0 +1,178 @@ +from collections.abc import Callable +from functools import wraps + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +from 
blueapi.core import MsgGenerator +from dodal.devices.zebra import ( + DISCONNECT, + IN1_TTL, + IN3_TTL, + IN4_TTL, + OR1, + PC_PULSE, + TTL_DETECTOR, + TTL_PANDA, + TTL_SHUTTER, + TTL_XSPRESS3, + ArmDemand, + EncEnum, + I03Axes, + RotationDirection, + SoftInState, + Zebra, +) + +from mx_bluesky.hyperion.log import LOGGER + +ZEBRA_STATUS_TIMEOUT = 30 + + +def bluesky_retry(func: Callable): + """Decorator that will retry the decorated plan if it fails. + + Use this with care as it knows nothing about the state of the world when things fail. + If it is possible that your plan fails when the beamline is in a transient state that + the plan could not act on, do not use this decorator without doing some more intelligent + clean-up. + + You should avoid using this decorator in general production as it hides errors; + instead, it should be used only while the underlying errors are being debugged. + """ + + @wraps(func) + def newfunc(*args, **kwargs): + def log_and_retry(exception): + LOGGER.error(f"Function {func.__name__} failed with {exception}, retrying") + yield from func(*args, **kwargs) + + yield from bpp.contingency_wrapper( + func(*args, **kwargs), except_plan=log_and_retry, auto_raise=False + ) + + return newfunc + + +def arm_zebra(zebra: Zebra): + yield from bps.abs_set(zebra.pc.arm, ArmDemand.ARM, wait=True) + + +def disarm_zebra(zebra: Zebra): + yield from bps.abs_set(zebra.pc.arm, ArmDemand.DISARM, wait=True) + + +@bluesky_retry +def setup_zebra_for_rotation( + zebra: Zebra, + axis: EncEnum = I03Axes.OMEGA, + start_angle: float = 0, + scan_width: float = 360, + shutter_opening_deg: float = 2.5, + shutter_opening_s: float = 0.04, + direction: RotationDirection = RotationDirection.POSITIVE, + group: str = "setup_zebra_for_rotation", + wait: bool = True, +): + """Set up the Zebra to collect a rotation dataset. Any plan using this is + responsible for setting the smargon velocity appropriately so that the desired + image width is achieved with the exposure time given here. + + Parameters: + zebra: The zebra device to use + axis: I03 axes enum representing which axis to use for position + compare. Currently always omega. + start_angle: Position at which the scan should begin, in degrees. + scan_width: Total angle through which to collect, in degrees. + shutter_opening_deg: How many degrees of rotation it takes for the fast shutter + to open. Increases the gate width. + shutter_opening_s: How many seconds it takes for the fast shutter to open. The + detector pulse is delayed after the shutter signal by this + amount. + direction: RotationDirection enum for positive or negative. + Defaults to POSITIVE. + group: A name for the group of statuses generated + wait: Block until all the settings have completed + """ + if not isinstance(direction, RotationDirection): + raise ValueError( + "Disallowed rotation direction provided to Zebra setup plan. " + "Use RotationDirection.POSITIVE or RotationDirection.NEGATIVE." 
+ ) + yield from bps.abs_set(zebra.pc.dir, direction.value, group=group) + LOGGER.info("ZEBRA SETUP: START") + # must be on for shutter trigger to be enabled + yield from bps.abs_set(zebra.inputs.soft_in_1, SoftInState.YES, group=group) + # Set gate start, adjust for shutter opening time if necessary + LOGGER.info(f"ZEBRA SETUP: degrees to adjust for shutter = {shutter_opening_deg}") + LOGGER.info(f"ZEBRA SETUP: start angle start: {start_angle}") + LOGGER.info(f"ZEBRA SETUP: start angle adjusted, gate start set to: {start_angle}") + yield from bps.abs_set(zebra.pc.gate_start, start_angle, group=group) + # set gate width to total width + yield from bps.abs_set( + zebra.pc.gate_width, scan_width + shutter_opening_deg, group=group + ) + LOGGER.info( + f"Pulse start set to shutter open time, set to: {abs(shutter_opening_s)}" + ) + yield from bps.abs_set(zebra.pc.pulse_start, abs(shutter_opening_s), group=group) + # Set gate position to be angle of interest + yield from bps.abs_set(zebra.pc.gate_trigger, axis.value, group=group) + # Trigger the shutter with the gate (from PC_GATE & SOFTIN1 -> OR1) + yield from bps.abs_set(zebra.output.out_pvs[TTL_SHUTTER], OR1, group=group) + # Trigger the detector with a pulse + yield from bps.abs_set(zebra.output.out_pvs[TTL_DETECTOR], PC_PULSE, group=group) + # Don't use the fluorescence detector + yield from bps.abs_set(zebra.output.out_pvs[TTL_XSPRESS3], DISCONNECT, group=group) + yield from bps.abs_set(zebra.output.pulse_1.input, DISCONNECT, group=group) + LOGGER.info(f"ZEBRA SETUP: END - {'' if wait else 'not'} waiting for completion") + if wait: + yield from bps.wait(group, timeout=ZEBRA_STATUS_TIMEOUT) + + +@bluesky_retry +def setup_zebra_for_gridscan(zebra: Zebra, group="setup_zebra_for_gridscan", wait=True): + yield from bps.abs_set(zebra.output.out_pvs[TTL_DETECTOR], IN3_TTL, group=group) + yield from bps.abs_set(zebra.output.out_pvs[TTL_SHUTTER], IN4_TTL, group=group) + yield from bps.abs_set(zebra.output.out_pvs[TTL_XSPRESS3], DISCONNECT, group=group) + yield from bps.abs_set(zebra.output.pulse_1.input, DISCONNECT, group=group) + + if wait: + yield from bps.wait(group, timeout=ZEBRA_STATUS_TIMEOUT) + + +@bluesky_retry +def set_zebra_shutter_to_manual( + zebra: Zebra, group="set_zebra_shutter_to_manual", wait=True +) -> MsgGenerator: + yield from bps.abs_set(zebra.output.out_pvs[TTL_DETECTOR], PC_PULSE, group=group) + yield from bps.abs_set(zebra.output.out_pvs[TTL_SHUTTER], OR1, group=group) + + if wait: + yield from bps.wait(group, timeout=ZEBRA_STATUS_TIMEOUT) + + +@bluesky_retry +def make_trigger_safe(zebra: Zebra, group="make_zebra_safe", wait=True): + yield from bps.abs_set( + zebra.inputs.soft_in_1, SoftInState.NO, wait=wait, group=group + ) + + +@bluesky_retry +def setup_zebra_for_panda_flyscan( + zebra: Zebra, group="setup_zebra_for_panda_flyscan", wait=True +): + # Forwards eiger trigger signal from panda + yield from bps.abs_set(zebra.output.out_pvs[TTL_DETECTOR], IN1_TTL, group=group) + + # Forwards signal from PPMAC to fast shutter. 
High while panda PLC is running + yield from bps.abs_set(zebra.output.out_pvs[TTL_SHUTTER], IN4_TTL, group=group) + + yield from bps.abs_set(zebra.output.out_pvs[3], DISCONNECT, group=group) + + yield from bps.abs_set( + zebra.output.out_pvs[TTL_PANDA], IN3_TTL, group=group + ) # Tells panda that motion is beginning/changing direction + + if wait: + yield from bps.wait(group, timeout=ZEBRA_STATUS_TIMEOUT) diff --git a/src/mx_bluesky/hyperion/device_setup_plans/smargon.py b/src/mx_bluesky/hyperion/device_setup_plans/smargon.py new file mode 100644 index 000000000..528a96b5d --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/smargon.py @@ -0,0 +1,25 @@ +import numpy as np +from bluesky import plan_stubs as bps +from dodal.devices.smargon import Smargon + +from mx_bluesky.hyperion.exceptions import WarningException + + +def move_smargon_warn_on_out_of_range( + smargon: Smargon, position: np.ndarray | list[float] | tuple[float, float, float] +): + """Throws a WarningException if the specified position is out of range for the + smargon. Otherwise moves to that position.""" + limits = yield from smargon.get_xyz_limits() + if not limits.position_valid(position): + raise WarningException( + "Pin tip centring failed - pin too long/short/bent and out of range" + ) + yield from bps.mv( + smargon.x, + position[0], + smargon.y, + position[1], + smargon.z, + position[2], + ) diff --git a/src/mx_bluesky/hyperion/device_setup_plans/utils.py b/src/mx_bluesky/hyperion/device_setup_plans/utils.py new file mode 100644 index 000000000..481190178 --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/utils.py @@ -0,0 +1,44 @@ +from collections.abc import Generator + +from bluesky import plan_stubs as bps +from bluesky import preprocessors as bpp +from bluesky.utils import Msg +from dodal.devices.detector.detector_motion import DetectorMotion, ShutterState +from dodal.devices.eiger import EigerDetector + +from mx_bluesky.hyperion.device_setup_plans.position_detector import ( + set_detector_z_position, + set_shutter, +) + + +def start_preparing_data_collection_then_do_plan( + eiger: EigerDetector, + detector_motion: DetectorMotion, + detector_distance_mm: float | None, + plan_to_run: Generator[Msg, None, None], + group="ready_for_data_collection", +) -> Generator[Msg, None, None]: + """Starts preparing for the next data collection and then runs the + given plan. + + Preparation consists of: + * Arming the Eiger + * Moving the detector to the specified position + * Opening the detector shutter + If the plan fails, it will disarm the Eiger. 
+ """ + + def wrapped_plan(): + yield from bps.abs_set(eiger.do_arm, 1, group=group) + if detector_distance_mm: + yield from set_detector_z_position( + detector_motion, detector_distance_mm, group + ) + yield from set_shutter(detector_motion, ShutterState.OPEN, group) + yield from plan_to_run + + yield from bpp.contingency_wrapper( + wrapped_plan(), + except_plan=lambda e: (yield from bps.stop(eiger)), + ) diff --git a/src/mx_bluesky/hyperion/device_setup_plans/xbpm_feedback.py b/src/mx_bluesky/hyperion/device_setup_plans/xbpm_feedback.py new file mode 100644 index 000000000..58defed3d --- /dev/null +++ b/src/mx_bluesky/hyperion/device_setup_plans/xbpm_feedback.py @@ -0,0 +1,90 @@ +from bluesky import plan_stubs as bps +from bluesky.preprocessors import finalize_wrapper +from bluesky.utils import make_decorator +from dodal.devices.attenuator import Attenuator +from dodal.devices.xbpm_feedback import Pause, XBPMFeedback + +from mx_bluesky.hyperion.log import LOGGER + + +def _check_and_pause_feedback( + xbpm_feedback: XBPMFeedback, + attenuator: Attenuator, + desired_transmission_fraction: float, +): + """Checks that the xbpm is in position before collection then turns it off. + + Args: + xbpm_feedback (XBPMFeedback): The XBPM device that is responsible for keeping + the beam in position + attenuator (Attenuator): The attenuator used to set transmission + desired_transmission_fraction (float): The desired transmission for the collection + + """ + yield from bps.mv(attenuator, 1.0) + LOGGER.info("Waiting for XBPM feedback before collection") + yield from bps.trigger(xbpm_feedback, wait=True) + LOGGER.info( + "XPBM feedback in position, pausing and setting transmission for collection" + ) + yield from bps.mv(xbpm_feedback.pause_feedback, Pause.PAUSE) + yield from bps.mv(attenuator, desired_transmission_fraction) + + +def _unpause_xbpm_feedback_and_set_transmission_to_1( + xbpm_feedback: XBPMFeedback, attenuator: Attenuator +): + """Turns the XBPM feedback back on and sets transmission to 1 so that it keeps the + beam aligned whilst not collecting. + + Args: + xbpm_feedback (XBPMFeedback): The XBPM device that is responsible for keeping + the beam in position + attenuator (Attenuator): The attenuator used to set transmission + """ + yield from bps.mv(xbpm_feedback.pause_feedback, Pause.RUN, attenuator, 1.0) + + +def transmission_and_xbpm_feedback_for_collection_wrapper( + plan, + xbpm_feedback: XBPMFeedback, + attenuator: Attenuator, + desired_transmission_fraction: float, +): + """Sets the transmission for the data collection, ensuring the xbpm feedback is valid + this wrapper should be run around every data collection. + + XBPM feedback isn't reliable during collections due to: + * Objects (e.g. attenuator) crossing the beam can cause large (incorrect) feedback movements + * Lower transmissions/higher energies are less reliable for the xbpm + + So we need to keep the transmission at 100% and the feedback on when not collecting + and then turn it off and set the correct transmission for collection. The feedback + mostly accounts for slow thermal drift so it is safe to assume that the beam is + stable during a collection. 
+ + Args: + plan: The plan performing the data collection + xbpm_feedback (XBPMFeedback): The XBPM device that is responsible for keeping + the beam in position + attenuator (Attenuator): The attenuator used to set transmission + desired_transmission_fraction (float): The desired transmission for the collection + """ + + def _inner_plan(): + yield from _check_and_pause_feedback( + xbpm_feedback, attenuator, desired_transmission_fraction + ) + return (yield from plan) + + return ( + yield from finalize_wrapper( + _inner_plan(), + _unpause_xbpm_feedback_and_set_transmission_to_1(xbpm_feedback, attenuator), + ) + ) + + +transmission_and_xbpm_feedback_for_collection_decorator = make_decorator( + transmission_and_xbpm_feedback_for_collection_wrapper +) diff --git a/src/mx_bluesky/hyperion/exceptions.py b/src/mx_bluesky/hyperion/exceptions.py new file mode 100644 index 000000000..a617e6d07 --- /dev/null +++ b/src/mx_bluesky/hyperion/exceptions.py @@ -0,0 +1,47 @@ +from collections.abc import Callable, Generator +from typing import TypeVar + +from bluesky.plan_stubs import null +from bluesky.preprocessors import contingency_wrapper +from bluesky.utils import Msg + + +class WarningException(Exception): + """An exception used when we want to warn GDA of a + problem but continue with UDC anyway""" + + pass + + +T = TypeVar("T") + + +def catch_exception_and_warn( + exception_to_catch: type[Exception], + func: Callable[..., Generator[Msg, None, T]], + *args, + **kwargs, +) -> Generator[Msg, None, T]: + """A plan wrapper to catch a specific exception and instead raise a WarningException, + so that UDC is not halted + + Example usage: + + 'def plan_which_can_raise_exception_a(*args, **kwargs): + ... + yield from catch_exception_and_warn(ExceptionA, plan_which_can_raise_exception_a, *args, **kwargs)' + + This will catch ExceptionA raised by the plan and instead raise a WarningException + """ + + def warn_if_exception_matches(exception: Exception): + if isinstance(exception, exception_to_catch): + raise WarningException(str(exception)) + yield from null() + + return ( + yield from contingency_wrapper( + func(*args, **kwargs), + except_plan=warn_if_exception_matches, + ) + ) diff --git a/src/mx_bluesky/hyperion/experiment_plans/__init__.py b/src/mx_bluesky/hyperion/experiment_plans/__init__.py new file mode 100644 index 000000000..4ab1a2b44 --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/__init__.py @@ -0,0 +1,30 @@ +"""This module contains the experimental plans which hyperion can run. + +The __all__ list in here contains the plans that are externally available from outside Hyperion. 
+""" + +from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import ( + flyscan_xray_centre, +) +from mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan import ( + grid_detect_then_xray_centre, +) +from mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan import ( + pin_tip_centre_then_xray_centre, +) +from mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan import ( + robot_load_then_centre, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + multi_rotation_scan, + rotation_scan, +) + +__all__ = [ + "flyscan_xray_centre", + "grid_detect_then_xray_centre", + "rotation_scan", + "pin_tip_centre_then_xray_centre", + "multi_rotation_scan", + "robot_load_then_centre", +] diff --git a/src/mx_bluesky/hyperion/experiment_plans/experiment_registry.py b/src/mx_bluesky/hyperion/experiment_plans/experiment_registry.py new file mode 100644 index 000000000..860608624 --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/experiment_registry.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +from collections.abc import Callable +from typing import TypedDict + +import mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan as flyscan_xray_centre_plan +import mx_bluesky.hyperion.experiment_plans.rotation_scan_plan as rotation_scan_plan +from mx_bluesky.hyperion.experiment_plans import ( + grid_detect_then_xray_centre_plan, + pin_centre_then_xray_centre_plan, + robot_load_then_centre_plan, +) +from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import ( + CallbacksFactory, + create_gridscan_callbacks, + create_robot_load_and_centre_callbacks, + create_rotation_callbacks, +) +from mx_bluesky.hyperion.parameters.gridscan import ( + GridScanWithEdgeDetect, + PinTipCentreThenXrayCentre, + RobotLoadThenCentre, + ThreeDGridScan, +) +from mx_bluesky.hyperion.parameters.rotation import MultiRotationScan, RotationScan + + +def not_implemented(): + raise NotImplementedError + + +def do_nothing(): + pass + + +class ExperimentRegistryEntry(TypedDict): + setup: Callable + param_type: type[ + ThreeDGridScan + | GridScanWithEdgeDetect + | RotationScan + | MultiRotationScan + | PinTipCentreThenXrayCentre + | RobotLoadThenCentre + ] + callbacks_factory: CallbacksFactory + + +PLAN_REGISTRY: dict[str, ExperimentRegistryEntry] = { + "flyscan_xray_centre": { + "setup": flyscan_xray_centre_plan.create_devices, + "param_type": ThreeDGridScan, + "callbacks_factory": create_gridscan_callbacks, + }, + "grid_detect_then_xray_centre": { + "setup": grid_detect_then_xray_centre_plan.create_devices, + "param_type": GridScanWithEdgeDetect, + "callbacks_factory": create_gridscan_callbacks, + }, + "rotation_scan": { + "setup": rotation_scan_plan.create_devices, + "param_type": RotationScan, + "callbacks_factory": create_rotation_callbacks, + }, + "pin_tip_centre_then_xray_centre": { + "setup": pin_centre_then_xray_centre_plan.create_devices, + "param_type": PinTipCentreThenXrayCentre, + "callbacks_factory": create_gridscan_callbacks, + }, + "robot_load_then_centre": { + "setup": robot_load_then_centre_plan.create_devices, + "param_type": RobotLoadThenCentre, + "callbacks_factory": create_robot_load_and_centre_callbacks, + }, + "multi_rotation_scan": { + "setup": rotation_scan_plan.create_devices, + "param_type": MultiRotationScan, + "callbacks_factory": create_rotation_callbacks, + }, +} + + +class PlanNotFound(Exception): + pass diff --git a/src/mx_bluesky/hyperion/experiment_plans/flyscan_xray_centre_plan.py 
b/src/mx_bluesky/hyperion/experiment_plans/flyscan_xray_centre_plan.py new file mode 100755 index 000000000..7f498cdbb --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/flyscan_xray_centre_plan.py @@ -0,0 +1,520 @@ +from __future__ import annotations + +import dataclasses +from collections.abc import Callable +from functools import partial +from pathlib import Path +from time import time +from typing import Protocol + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +import numpy as np +from blueapi.core import BlueskyContext, MsgGenerator +from dodal.devices.aperturescatterguard import ( + AperturePosition, + ApertureScatterguard, +) +from dodal.devices.attenuator import Attenuator +from dodal.devices.backlight import Backlight +from dodal.devices.dcm import DCM +from dodal.devices.eiger import EigerDetector +from dodal.devices.fast_grid_scan import ( + FastGridScanCommon, + PandAFastGridScan, + ZebraFastGridScan, +) +from dodal.devices.fast_grid_scan import ( + set_fast_grid_scan_params as set_flyscan_params, +) +from dodal.devices.flux import Flux +from dodal.devices.robot import BartRobot +from dodal.devices.s4_slit_gaps import S4SlitGaps +from dodal.devices.smargon import Smargon, StubPosition +from dodal.devices.synchrotron import Synchrotron +from dodal.devices.undulator import Undulator +from dodal.devices.xbpm_feedback import XBPMFeedback +from dodal.devices.zebra import Zebra +from dodal.devices.zocalo.zocalo_results import ( + ZOCALO_READING_PLAN_NAME, + ZOCALO_STAGE_GROUP, + ZocaloResults, + get_processing_result, +) +from dodal.plans.check_topup import check_topup_and_wait_if_necessary +from ophyd_async.panda import HDFPanda +from scanspec.core import AxesPoints, Axis + +from mx_bluesky.hyperion.device_setup_plans.manipulate_sample import move_x_y_z +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, + read_hardware_for_zocalo, + read_hardware_pre_collection, +) +from mx_bluesky.hyperion.device_setup_plans.setup_panda import ( + disarm_panda_for_gridscan, + set_panda_directory, + setup_panda_for_flyscan, +) +from mx_bluesky.hyperion.device_setup_plans.setup_zebra import ( + set_zebra_shutter_to_manual, + setup_zebra_for_gridscan, + setup_zebra_for_panda_flyscan, +) +from mx_bluesky.hyperion.device_setup_plans.xbpm_feedback import ( + transmission_and_xbpm_feedback_for_collection_decorator, +) +from mx_bluesky.hyperion.exceptions import WarningException +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan +from mx_bluesky.hyperion.tracing import TRACER +from mx_bluesky.hyperion.utils.context import device_composite_from_context + + +class SmargonSpeedException(Exception): + pass + + +@dataclasses.dataclass +class FlyScanXRayCentreComposite: + """All devices which are directly or indirectly required by this plan""" + + aperture_scatterguard: ApertureScatterguard + attenuator: Attenuator + backlight: Backlight + dcm: DCM + eiger: EigerDetector + zebra_fast_grid_scan: ZebraFastGridScan + flux: Flux + s4_slit_gaps: S4SlitGaps + smargon: Smargon + undulator: Undulator + synchrotron: Synchrotron + xbpm_feedback: XBPMFeedback + zebra: Zebra + zocalo: ZocaloResults + panda: HDFPanda + panda_fast_grid_scan: PandAFastGridScan + robot: BartRobot + + @property + def sample_motors(self) -> Smargon: + """Convenience alias with a more user-friendly name""" + return self.smargon + + 
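+
+# A minimal sketch of how this composite is typically built and used, assuming a BlueskyContext
+# in which the relevant dodal devices have already been registered and a populated ThreeDGridScan
+# parameter model (both are placeholders here):
+#
+#     composite = create_devices(context)
+#     RE(flyscan_xray_centre(composite, parameters))
+#
+# device_composite_from_context is expected to fill each field of the dataclass above with the
+# device of the same name from the context, so the field names should match the dodal device names.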
+def create_devices(context: BlueskyContext) -> FlyScanXRayCentreComposite: + """Creates the devices required for the plan and connects to them""" + return device_composite_from_context(context, FlyScanXRayCentreComposite) + + +def flyscan_xray_centre( + composite: FlyScanXRayCentreComposite, + parameters: ThreeDGridScan, +) -> MsgGenerator: + """Create the plan to run the grid scan based on provided parameters. + + The ispyb handler should be added to the whole gridscan as we want to capture errors + at any point in it. + + Args: + parameters (ThreeDGridScan): The parameters to run the scan. + + Returns: + Generator: The plan for the gridscan + """ + parameters.features.update_self_from_server() + composite.eiger.set_detector_parameters(parameters.detector_params) + composite.zocalo.zocalo_environment = parameters.zocalo_environment + + feature_controlled = _get_feature_controlled(composite, parameters) + + @bpp.set_run_key_decorator(CONST.PLAN.GRIDSCAN_OUTER) + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.GRIDSCAN_OUTER, + CONST.TRIGGER.ZOCALO: CONST.PLAN.DO_FGS, + "zocalo_environment": parameters.zocalo_environment, + "hyperion_parameters": parameters.json(), + "activate_callbacks": [ + "GridscanNexusFileCallback", + ], + } + ) + @bpp.finalize_decorator(lambda: feature_controlled.tidy_plan(composite)) + @transmission_and_xbpm_feedback_for_collection_decorator( + composite.xbpm_feedback, + composite.attenuator, + parameters.transmission_frac, + ) + def run_gridscan_and_move_and_tidy( + fgs_composite: FlyScanXRayCentreComposite, + params: ThreeDGridScan, + feature_controlled: _FeatureControlled, + ): + yield from run_gridscan_and_move(fgs_composite, params, feature_controlled) + + return run_gridscan_and_move_and_tidy(composite, parameters, feature_controlled) + + +@bpp.set_run_key_decorator(CONST.PLAN.GRIDSCAN_AND_MOVE) +@bpp.run_decorator(md={"subplan_name": CONST.PLAN.GRIDSCAN_AND_MOVE}) +def run_gridscan_and_move( + fgs_composite: FlyScanXRayCentreComposite, + parameters: ThreeDGridScan, + feature_controlled: _FeatureControlled, +) -> MsgGenerator: + """A multi-run plan which runs a gridscan, gets the results from zocalo + and moves to the centre of mass determined by zocalo""" + + # We get the initial motor positions so we can return to them on zocalo failure + initial_xyz = np.array( + [ + (yield from bps.rd(fgs_composite.sample_motors.x)), + (yield from bps.rd(fgs_composite.sample_motors.y)), + (yield from bps.rd(fgs_composite.sample_motors.z)), + ] + ) + + yield from feature_controlled.setup_trigger(fgs_composite, parameters, initial_xyz) + + LOGGER.info("Starting grid scan") + yield from bps.stage( + fgs_composite.zocalo, group=ZOCALO_STAGE_GROUP + ) # connect to zocalo and make sure the queue is clear + yield from run_gridscan(fgs_composite, parameters, feature_controlled) + + LOGGER.info("Grid scan finished, getting results.") + + with TRACER.start_span("wait_for_zocalo"): + yield from bps.trigger_and_read( + [fgs_composite.zocalo], name=ZOCALO_READING_PLAN_NAME + ) + LOGGER.info("Zocalo triggered and read, interpreting results.") + xray_centre, bbox_size = yield from get_processing_result(fgs_composite.zocalo) + LOGGER.info(f"Got xray centre: {xray_centre}, bbox size: {bbox_size}") + if xray_centre is not None: + xray_centre = parameters.FGS_params.grid_position_to_motor_position( + xray_centre + ) + else: + xray_centre = initial_xyz + LOGGER.warning("No X-ray centre received") + if bbox_size is not None: + with 
TRACER.start_span("change_aperture"): + yield from set_aperture_for_bbox_size( + fgs_composite.aperture_scatterguard, bbox_size + ) + else: + LOGGER.warning("No bounding box size recieved") + + # once we have the results, go to the appropriate position + LOGGER.info("Moving to centre of mass.") + with TRACER.start_span("move_to_result"): + x, y, z = xray_centre + yield from move_x_y_z(fgs_composite.sample_motors, x, y, z, wait=True) + + if parameters.FGS_params.set_stub_offsets: + LOGGER.info("Recentring smargon co-ordinate system to this point.") + yield from bps.mv( + fgs_composite.sample_motors.stub_offsets, StubPosition.CURRENT_AS_CENTER + ) + + # Turn off dev/shm streaming to avoid filling disk, see https://github.com/DiamondLightSource/hyperion/issues/1395 + LOGGER.info("Turning off Eiger dev/shm streaming") + yield from bps.abs_set(fgs_composite.eiger.odin.fan.dev_shm_enable, 0) + + # Wait on everything before returning to GDA (particularly apertures), can be removed + # when we do not return to GDA here + yield from bps.wait() + + +@bpp.set_run_key_decorator(CONST.PLAN.GRIDSCAN_MAIN) +@bpp.run_decorator(md={"subplan_name": CONST.PLAN.GRIDSCAN_MAIN}) +def run_gridscan( + fgs_composite: FlyScanXRayCentreComposite, + parameters: ThreeDGridScan, + feature_controlled: _FeatureControlled, + md={ # noqa + "plan_name": CONST.PLAN.GRIDSCAN_MAIN, + }, +): + sample_motors = fgs_composite.sample_motors + + # Currently gridscan only works for omega 0, see # + with TRACER.start_span("moving_omega_to_0"): + yield from bps.abs_set(sample_motors.omega, 0) + + # We only subscribe to the communicator callback for run_gridscan, so this is where + # we should generate an event reading the values which need to be included in the + # ispyb deposition + with TRACER.start_span("ispyb_hardware_readings"): + yield from read_hardware_pre_collection( + fgs_composite.undulator, + fgs_composite.synchrotron, + fgs_composite.s4_slit_gaps, + fgs_composite.robot, + fgs_composite.smargon, + ) + + read_during_collection = partial( + read_hardware_during_collection, + fgs_composite.aperture_scatterguard, + fgs_composite.attenuator, + fgs_composite.flux, + fgs_composite.dcm, + fgs_composite.eiger, + ) + + LOGGER.info("Setting fgs params") + yield from feature_controlled.set_flyscan_params() + + LOGGER.info("Waiting for gridscan validity check") + yield from wait_for_gridscan_valid(feature_controlled.fgs_motors) + + LOGGER.info("Waiting for arming to finish") + yield from bps.wait(CONST.WAIT.GRID_READY_FOR_DC) + yield from bps.stage(fgs_composite.eiger) + + yield from kickoff_and_complete_gridscan( + feature_controlled.fgs_motors, + fgs_composite.eiger, + fgs_composite.synchrotron, + [parameters.scan_points_first_grid, parameters.scan_points_second_grid], + parameters.scan_indices, + do_during_run=read_during_collection, + ) + yield from bps.abs_set(feature_controlled.fgs_motors.z_steps, 0, wait=False) + + +def kickoff_and_complete_gridscan( + gridscan: FastGridScanCommon, + eiger: EigerDetector, + synchrotron: Synchrotron, + scan_points: list[AxesPoints[Axis]], + scan_start_indices: list[int], + do_during_run: Callable[[], MsgGenerator] | None = None, +): + @TRACER.start_as_current_span(CONST.PLAN.DO_FGS) + @bpp.set_run_key_decorator(CONST.PLAN.DO_FGS) + @bpp.run_decorator( + md={ + "subplan_name": CONST.PLAN.DO_FGS, + "scan_points": scan_points, + "scan_start_indices": scan_start_indices, + } + ) + @bpp.contingency_decorator( + except_plan=lambda e: (yield from bps.stop(eiger)), + else_plan=lambda: (yield from 
bps.unstage(eiger)), + ) + def do_fgs(): + # Check topup gate + expected_images = yield from bps.rd(gridscan.expected_images) + exposure_sec_per_image = yield from bps.rd(eiger.cam.acquire_time) + LOGGER.info("waiting for topup if necessary...") + yield from check_topup_and_wait_if_necessary( + synchrotron, + expected_images * exposure_sec_per_image, + 30.0, + ) + yield from read_hardware_for_zocalo(eiger) + LOGGER.info("Wait for all moves with no assigned group") + yield from bps.wait() + LOGGER.info("kicking off FGS") + yield from bps.kickoff(gridscan, wait=True) + gridscan_start_time = time() + LOGGER.info("Waiting for Zocalo device queue to have been cleared...") + yield from bps.wait( + ZOCALO_STAGE_GROUP + ) # Make sure ZocaloResults queue is clear and ready to accept our new data + if do_during_run: + LOGGER.info(f"Running {do_during_run} during FGS") + yield from do_during_run() + LOGGER.info("completing FGS") + yield from bps.complete(gridscan, wait=True) + + # Remove this logging statement once metrics have been added + LOGGER.info( + f"Gridscan motion program took {round(time()-gridscan_start_time,2)} to complete" + ) + + yield from do_fgs() + + +def wait_for_gridscan_valid(fgs_motors: FastGridScanCommon, timeout=0.5): + LOGGER.info("Waiting for valid fgs_params") + SLEEP_PER_CHECK = 0.1 + times_to_check = int(timeout / SLEEP_PER_CHECK) + for _ in range(times_to_check): + scan_invalid = yield from bps.rd(fgs_motors.scan_invalid) + pos_counter = yield from bps.rd(fgs_motors.position_counter) + LOGGER.debug( + f"Scan invalid: {scan_invalid} and position counter: {pos_counter}" + ) + if not scan_invalid and pos_counter == 0: + LOGGER.info("Gridscan scan valid and position counter reset") + return + yield from bps.sleep(SLEEP_PER_CHECK) + raise WarningException("Scan invalid - pin too long/short/bent and out of range") + + +def set_aperture_for_bbox_size( + aperture_device: ApertureScatterguard, + bbox_size: list[int] | np.ndarray, +): + # bbox_size is [x,y,z], for i03 we only care about x + new_selected_aperture = ( + AperturePosition.MEDIUM if bbox_size[0] < 2 else AperturePosition.LARGE + ) + gda_name = aperture_device.get_gda_name_for_position(new_selected_aperture) + LOGGER.info( + f"Setting aperture to {new_selected_aperture} based on bounding box size {bbox_size}." + ) + + @bpp.set_run_key_decorator("change_aperture") + @bpp.run_decorator( + md={ + "subplan_name": "change_aperture", + "aperture_size": gda_name, + } + ) + def set_aperture(): + yield from bps.abs_set(aperture_device, new_selected_aperture) + + yield from set_aperture() + + +@dataclasses.dataclass +class _FeatureControlled: + class _ZebraSetup(Protocol): + def __call__( + self, zebra: Zebra, group="setup_zebra_for_gridscan", wait=True + ) -> MsgGenerator: ... + + class _ExtraSetup(Protocol): + def __call__( + self, + fgs_composite: FlyScanXRayCentreComposite, + parameters: ThreeDGridScan, + initial_xyz: np.ndarray, + ) -> MsgGenerator: ... 
+ + setup_trigger: _ExtraSetup + tidy_plan: Callable[[FlyScanXRayCentreComposite], MsgGenerator] + set_flyscan_params: Callable[[], MsgGenerator] + fgs_motors: FastGridScanCommon + + +def _get_feature_controlled( + fgs_composite: FlyScanXRayCentreComposite, + parameters: ThreeDGridScan, +): + if parameters.features.use_panda_for_gridscan: + return _FeatureControlled( + setup_trigger=_panda_triggering_setup, + tidy_plan=_panda_tidy, + set_flyscan_params=partial( + set_flyscan_params, + fgs_composite.panda_fast_grid_scan, + parameters.panda_FGS_params, + ), + fgs_motors=fgs_composite.panda_fast_grid_scan, + ) + else: + return _FeatureControlled( + setup_trigger=_zebra_triggering_setup, + tidy_plan=partial(_generic_tidy, group="flyscan_zebra_tidy", wait=True), + set_flyscan_params=partial( + set_flyscan_params, + fgs_composite.zebra_fast_grid_scan, + parameters.FGS_params, + ), + fgs_motors=fgs_composite.zebra_fast_grid_scan, + ) + + +def _generic_tidy( + fgs_composite: FlyScanXRayCentreComposite, group, wait=True +) -> MsgGenerator: + LOGGER.info("Tidying up Zebra") + yield from set_zebra_shutter_to_manual(fgs_composite.zebra, group=group, wait=wait) + LOGGER.info("Tidying up Zocalo") + # make sure we don't consume any other results + yield from bps.unstage(fgs_composite.zocalo, group=group, wait=wait) + + +def _panda_tidy(fgs_composite: FlyScanXRayCentreComposite): + group = "panda_flyscan_tidy" + LOGGER.info("Disabling panda blocks") + yield from disarm_panda_for_gridscan(fgs_composite.panda, group) + yield from _generic_tidy(fgs_composite, group, False) + yield from bps.wait(group, timeout=10) + yield from bps.unstage(fgs_composite.panda) + + +def _zebra_triggering_setup( + fgs_composite: FlyScanXRayCentreComposite, + parameters: ThreeDGridScan, + initial_xyz: np.ndarray, +): + yield from setup_zebra_for_gridscan(fgs_composite.zebra, wait=True) + + +def _panda_triggering_setup( + fgs_composite: FlyScanXRayCentreComposite, + parameters: ThreeDGridScan, + initial_xyz: np.ndarray, +): + LOGGER.info("Setting up Panda for flyscan") + + run_up_distance_mm = yield from bps.rd( + fgs_composite.panda_fast_grid_scan.run_up_distance_mm + ) + + # Set the time between x steps pv + DEADTIME_S = 1e-6 # according to https://www.dectris.com/en/detectors/x-ray-detectors/eiger2/eiger2-for-synchrotrons/eiger2-x/ + + time_between_x_steps_ms = (DEADTIME_S + parameters.exposure_time_s) * 1e3 + + smargon_speed_limit_mm_per_s = yield from bps.rd( + fgs_composite.smargon.x.max_velocity + ) + + sample_velocity_mm_per_s = ( + parameters.panda_FGS_params.x_step_size * 1e3 / time_between_x_steps_ms + ) + if sample_velocity_mm_per_s > smargon_speed_limit_mm_per_s: + raise SmargonSpeedException( + f"Smargon speed was calculated from x step size\ + {parameters.panda_FGS_params.x_step_size} and\ + time_between_x_steps_ms {time_between_x_steps_ms} as\ + {sample_velocity_mm_per_s}. The smargon's speed limit is\ + {smargon_speed_limit_mm_per_s} mm/s." 
+ ) + else: + LOGGER.info( + f"Panda grid scan: Smargon speed set to {smargon_speed_limit_mm_per_s} mm/s" + f" and using a run-up distance of {run_up_distance_mm}" + ) + + yield from bps.mv( + fgs_composite.panda_fast_grid_scan.time_between_x_steps_ms, + time_between_x_steps_ms, + ) + + directory_provider_root = Path(parameters.storage_directory) + yield from set_panda_directory(directory_provider_root) + + yield from setup_panda_for_flyscan( + fgs_composite.panda, + parameters.panda_FGS_params, + initial_xyz[0], + parameters.exposure_time_s, + time_between_x_steps_ms, + sample_velocity_mm_per_s, + ) + + LOGGER.info("Setting up Zebra for panda flyscan") + yield from setup_zebra_for_panda_flyscan(fgs_composite.zebra, wait=True) diff --git a/src/mx_bluesky/hyperion/experiment_plans/grid_detect_then_xray_centre_plan.py b/src/mx_bluesky/hyperion/experiment_plans/grid_detect_then_xray_centre_plan.py new file mode 100644 index 000000000..27b34f032 --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/grid_detect_then_xray_centre_plan.py @@ -0,0 +1,204 @@ +from __future__ import annotations + +import dataclasses +from pathlib import Path + +from blueapi.core import BlueskyContext, MsgGenerator +from bluesky import plan_stubs as bps +from bluesky import preprocessors as bpp +from dodal.devices.aperturescatterguard import ApertureScatterguard +from dodal.devices.attenuator import Attenuator +from dodal.devices.backlight import Backlight, BacklightPosition +from dodal.devices.dcm import DCM +from dodal.devices.detector.detector_motion import DetectorMotion +from dodal.devices.eiger import EigerDetector +from dodal.devices.fast_grid_scan import PandAFastGridScan, ZebraFastGridScan +from dodal.devices.flux import Flux +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.oav.oav_parameters import OAV_CONFIG_JSON, OAVParameters +from dodal.devices.oav.pin_image_recognition import PinTipDetection +from dodal.devices.robot import BartRobot +from dodal.devices.s4_slit_gaps import S4SlitGaps +from dodal.devices.smargon import Smargon +from dodal.devices.synchrotron import Synchrotron +from dodal.devices.undulator import Undulator +from dodal.devices.xbpm_feedback import XBPMFeedback +from dodal.devices.zebra import Zebra +from dodal.devices.zocalo import ZocaloResults +from ophyd_async.panda import HDFPanda + +from mx_bluesky.hyperion.device_setup_plans.manipulate_sample import ( + move_aperture_if_required, +) +from mx_bluesky.hyperion.device_setup_plans.utils import ( + start_preparing_data_collection_then_do_plan, +) +from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import ( + FlyScanXRayCentreComposite as FlyScanXRayCentreComposite, +) +from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import ( + flyscan_xray_centre, +) +from mx_bluesky.hyperion.experiment_plans.oav_grid_detection_plan import ( + OavGridDetectionComposite, + grid_detection_plan, +) +from mx_bluesky.hyperion.external_interaction.callbacks.grid_detection_callback import ( + GridDetectionCallback, + GridParamUpdate, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + ispyb_activation_wrapper, +) +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ( + GridScanWithEdgeDetect, + ThreeDGridScan, +) +from mx_bluesky.hyperion.utils.context import device_composite_from_context + + +@dataclasses.dataclass +class 
GridDetectThenXRayCentreComposite: + """All devices which are directly or indirectly required by this plan""" + + aperture_scatterguard: ApertureScatterguard + attenuator: Attenuator + backlight: Backlight + dcm: DCM + detector_motion: DetectorMotion + eiger: EigerDetector + zebra_fast_grid_scan: ZebraFastGridScan + flux: Flux + oav: OAV + pin_tip_detection: PinTipDetection + smargon: Smargon + synchrotron: Synchrotron + s4_slit_gaps: S4SlitGaps + undulator: Undulator + xbpm_feedback: XBPMFeedback + zebra: Zebra + zocalo: ZocaloResults + panda: HDFPanda + panda_fast_grid_scan: PandAFastGridScan + robot: BartRobot + + +def create_devices(context: BlueskyContext) -> GridDetectThenXRayCentreComposite: + return device_composite_from_context(context, GridDetectThenXRayCentreComposite) + + +def create_parameters_for_flyscan_xray_centre( + grid_scan_with_edge_params: GridScanWithEdgeDetect, + grid_parameters: GridParamUpdate, +) -> ThreeDGridScan: + params_json = grid_scan_with_edge_params.dict() + params_json.update(grid_parameters) + flyscan_xray_centre_parameters = ThreeDGridScan(**params_json) + LOGGER.info(f"Parameters for FGS: {flyscan_xray_centre_parameters}") + return flyscan_xray_centre_parameters + + +def detect_grid_and_do_gridscan( + composite: GridDetectThenXRayCentreComposite, + parameters: GridScanWithEdgeDetect, + oav_params: OAVParameters, +): + snapshot_template = f"{parameters.detector_params.prefix}_{parameters.detector_params.run_number}_{{angle}}" + + grid_params_callback = GridDetectionCallback(composite.oav.parameters) + + @bpp.subs_decorator([grid_params_callback]) + def run_grid_detection_plan( + oav_params, + snapshot_template, + snapshot_dir: Path, + ): + grid_detect_composite = OavGridDetectionComposite( + backlight=composite.backlight, + oav=composite.oav, + smargon=composite.smargon, + pin_tip_detection=composite.pin_tip_detection, + ) + + yield from grid_detection_plan( + grid_detect_composite, + oav_params, + snapshot_template, + str(snapshot_dir), + grid_width_microns=parameters.grid_width_um, + ) + + yield from run_grid_detection_plan( + oav_params, + snapshot_template, + parameters.snapshot_directory, + ) + + yield from bps.abs_set(composite.backlight, BacklightPosition.OUT) + + yield from move_aperture_if_required( + composite.aperture_scatterguard, + parameters.selected_aperture, + group=CONST.WAIT.GRID_READY_FOR_DC, + ) + + yield from flyscan_xray_centre( + FlyScanXRayCentreComposite( + aperture_scatterguard=composite.aperture_scatterguard, + attenuator=composite.attenuator, + backlight=composite.backlight, + eiger=composite.eiger, + panda_fast_grid_scan=composite.panda_fast_grid_scan, + flux=composite.flux, + s4_slit_gaps=composite.s4_slit_gaps, + smargon=composite.smargon, + undulator=composite.undulator, + synchrotron=composite.synchrotron, + xbpm_feedback=composite.xbpm_feedback, + zebra=composite.zebra, + zocalo=composite.zocalo, + panda=composite.panda, + zebra_fast_grid_scan=composite.zebra_fast_grid_scan, + dcm=composite.dcm, + robot=composite.robot, + ), + create_parameters_for_flyscan_xray_centre( + parameters, grid_params_callback.get_grid_parameters() + ), + ) + + +def grid_detect_then_xray_centre( + composite: GridDetectThenXRayCentreComposite, + parameters: GridScanWithEdgeDetect, + oav_config: str = OAV_CONFIG_JSON, +) -> MsgGenerator: + """ + A plan which combines the collection of snapshots from the OAV and the determination + of the grid dimensions to use for the following grid scan. 
+ """ + + eiger: EigerDetector = composite.eiger + + eiger.set_detector_parameters(parameters.detector_params) + + oav_params = OAVParameters("xrayCentring", oav_config) + + plan_to_perform = ispyb_activation_wrapper( + detect_grid_and_do_gridscan( + composite, + parameters, + oav_params, + ), + parameters, + ) + + return start_preparing_data_collection_then_do_plan( + eiger, + composite.detector_motion, + parameters.detector_params.detector_distance, + plan_to_perform, + group=CONST.WAIT.GRID_READY_FOR_DC, + ) diff --git a/src/mx_bluesky/hyperion/experiment_plans/oav_grid_detection_plan.py b/src/mx_bluesky/hyperion/experiment_plans/oav_grid_detection_plan.py new file mode 100644 index 000000000..2b248da29 --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/oav_grid_detection_plan.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import dataclasses +import math +from typing import TYPE_CHECKING + +import bluesky.plan_stubs as bps +import numpy as np +from blueapi.core import BlueskyContext +from dodal.devices.backlight import Backlight +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.oav.pin_image_recognition import PinTipDetection +from dodal.devices.oav.pin_image_recognition.utils import NONE_VALUE +from dodal.devices.oav.utils import PinNotFoundException, wait_for_tip_to_be_found +from dodal.devices.smargon import Smargon + +from mx_bluesky.hyperion.device_setup_plans.setup_oav import ( + pre_centring_setup_oav, +) +from mx_bluesky.hyperion.exceptions import catch_exception_and_warn +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.utils.context import device_composite_from_context + +if TYPE_CHECKING: + from dodal.devices.oav.oav_parameters import OAVParameters + + +@dataclasses.dataclass +class OavGridDetectionComposite: + """All devices which are directly or indirectly required by this plan""" + + backlight: Backlight + oav: OAV + smargon: Smargon + pin_tip_detection: PinTipDetection + + +def create_devices(context: BlueskyContext) -> OavGridDetectionComposite: + return device_composite_from_context(context, OavGridDetectionComposite) + + +def get_min_and_max_y_of_pin( + top: np.ndarray, bottom: np.ndarray, full_image_height_px: int +) -> tuple[int, int]: + """Gives the minimum and maximum y that would cover the whole pin. + + First filters out where no edge was found or the edge covers the full image. + If this results in no edges found then returns a min/max that covers the full image + """ + filtered_top = top[np.where((top != 0) & (top != NONE_VALUE))] + min_y = min(filtered_top) if len(filtered_top) else 0 + filtered_bottom = bottom[ + np.where((bottom != full_image_height_px) & (bottom != NONE_VALUE)) + ] + max_y = max(filtered_bottom) if len(filtered_bottom) else full_image_height_px + return min_y, max_y + + +def grid_detection_plan( + composite: OavGridDetectionComposite, + parameters: OAVParameters, + snapshot_template: str, + snapshot_dir: str, + grid_width_microns: float, + box_size_um: float = 20, +): + """ + Creates the parameters for two grids that are 90 degrees from each other and + encompass the whole of the sample as it appears in the OAV. + + Args: + composite (OavGridDetectionComposite): Composite containing devices for doing a grid detection. 
+ parameters (OAVParameters): Object containing parameters for setting up the OAV + snapshot_template (str): A template for the name of the snapshots, expected to be filled in with an angle + snapshot_dir (str): The location to save snapshots + grid_width_microns (int): The width of the grid to scan in microns + box_size_um (float): The size of each box of the grid in microns + """ + oav: OAV = composite.oav + smargon: Smargon = composite.smargon + pin_tip_detection: PinTipDetection = composite.pin_tip_detection + + LOGGER.info("OAV Centring: Starting grid detection centring") + + yield from bps.wait() + + # Set relevant PVs to whatever the config dictates. + yield from pre_centring_setup_oav(oav, parameters, pin_tip_detection) + + LOGGER.info("OAV Centring: Camera set up") + + assert isinstance(oav.parameters.micronsPerXPixel, float) + box_size_x_pixels = box_size_um / oav.parameters.micronsPerXPixel + assert isinstance(oav.parameters.micronsPerYPixel, float) + box_size_y_pixels = box_size_um / oav.parameters.micronsPerYPixel + + grid_width_pixels = int(grid_width_microns / oav.parameters.micronsPerXPixel) + + # The FGS uses -90 so we need to match it + for angle in [0, -90]: + yield from bps.mv(smargon.omega, angle) + # need to wait for the OAV image to update + # See #673 for improvements + yield from bps.sleep(CONST.HARDWARE.OAV_REFRESH_DELAY) + + tip_x_px, tip_y_px = yield from catch_exception_and_warn( + PinNotFoundException, wait_for_tip_to_be_found, pin_tip_detection + ) + + LOGGER.info(f"Tip is at x,y: {tip_x_px},{tip_y_px}") + + top_edge = np.array((yield from bps.rd(pin_tip_detection.triggered_top_edge))) + bottom_edge = np.array( + (yield from bps.rd(pin_tip_detection.triggered_bottom_edge)) + ) + + full_image_height_px = yield from bps.rd(oav.cam.array_size.array_size_y) + + # only use the area from the start of the pin onwards + top_edge = top_edge[tip_x_px : tip_x_px + grid_width_pixels] + bottom_edge = bottom_edge[tip_x_px : tip_x_px + grid_width_pixels] + LOGGER.info(f"OAV Edge detection top: {list(top_edge)}") + LOGGER.info(f"OAV Edge detection bottom: {list(bottom_edge)}") + + min_y, max_y = ( + float(n) + for n in get_min_and_max_y_of_pin( + top_edge, bottom_edge, full_image_height_px + ) + ) + + grid_height_px: float = max_y - min_y + + y_steps: int = math.ceil(grid_height_px / box_size_y_pixels) + + # Panda not configured to run a half complete snake so enforce even rows on first grid + # See https://github.com/DiamondLightSource/hyperion/wiki/PandA-constant%E2%80%90motion-scanning#motion-program-summary + if y_steps % 2 and angle == 0: + LOGGER.debug( + f"Forcing number of rows in first grid to be even: Adding an extra row onto bottom of first grid and shifting grid upwards by {box_size_y_pixels/2}" + ) + y_steps += 1 + min_y -= box_size_y_pixels / 2 + max_y += box_size_y_pixels / 2 + grid_height_px += box_size_y_pixels + + LOGGER.info(f"Drawing snapshot {grid_width_pixels} by {grid_height_px}") + + x_steps = math.ceil(grid_width_pixels / box_size_x_pixels) + + upper_left = (tip_x_px, min_y) + + yield from bps.abs_set(oav.grid_snapshot.top_left_x, upper_left[0]) + yield from bps.abs_set(oav.grid_snapshot.top_left_y, upper_left[1]) + yield from bps.abs_set(oav.grid_snapshot.box_width, box_size_x_pixels) + yield from bps.abs_set(oav.grid_snapshot.num_boxes_x, x_steps) + yield from bps.abs_set(oav.grid_snapshot.num_boxes_y, y_steps) + + snapshot_filename = snapshot_template.format(angle=abs(angle)) + + yield from bps.abs_set(oav.grid_snapshot.filename, 
snapshot_filename) + yield from bps.abs_set(oav.grid_snapshot.directory, snapshot_dir) + yield from bps.trigger(oav.grid_snapshot, wait=True) + yield from bps.create(CONST.DESCRIPTORS.OAV_GRID_SNAPSHOT_TRIGGERED) + + yield from bps.read(oav.grid_snapshot) + yield from bps.read(smargon) + yield from bps.save() + + LOGGER.info( + f"Grid calculated at {angle}: {x_steps} by {y_steps} steps starting at {upper_left}px" + ) diff --git a/src/mx_bluesky/hyperion/experiment_plans/oav_snapshot_plan.py b/src/mx_bluesky/hyperion/experiment_plans/oav_snapshot_plan.py new file mode 100644 index 000000000..3d371d6fa --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/oav_snapshot_plan.py @@ -0,0 +1,87 @@ +from datetime import datetime +from typing import Protocol + +from blueapi.core import MsgGenerator +from bluesky import plan_stubs as bps +from dodal.devices.aperturescatterguard import AperturePosition, ApertureScatterguard +from dodal.devices.backlight import Backlight, BacklightPosition +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.smargon import Smargon + +from mx_bluesky.hyperion.device_setup_plans.setup_oav import setup_general_oav_params +from mx_bluesky.hyperion.parameters.components import WithSnapshot +from mx_bluesky.hyperion.parameters.constants import DocDescriptorNames + +OAV_SNAPSHOT_SETUP_GROUP = "oav_snapshot_setup" +OAV_SNAPSHOT_SETUP_SHOT = "oav_snapshot_setup_shot" +OAV_SNAPSHOT_GROUP = "oav_snapshot_group" + + +class OavSnapshotComposite(Protocol): + smargon: Smargon + oav: OAV + aperture_scatterguard: ApertureScatterguard + backlight: Backlight + + +def setup_oav_snapshot_plan( + composite: OavSnapshotComposite, + parameters: WithSnapshot, + max_omega_velocity_deg_s: float, +): + if not parameters.take_snapshots: + return + + yield from bps.abs_set( + composite.smargon.omega.velocity, max_omega_velocity_deg_s, wait=True + ) + yield from bps.abs_set( + composite.backlight, BacklightPosition.IN, group=OAV_SNAPSHOT_SETUP_GROUP + ) + yield from bps.abs_set( + composite.aperture_scatterguard, + AperturePosition.ROBOT_LOAD, + group=OAV_SNAPSHOT_SETUP_GROUP, + ) + + +def oav_snapshot_plan( + composite: OavSnapshotComposite, + parameters: WithSnapshot, + oav_parameters: OAVParameters, + wait: bool = True, +) -> MsgGenerator: + if not parameters.take_snapshots: + return + yield from bps.wait(group=OAV_SNAPSHOT_SETUP_GROUP) + yield from _setup_oav(composite, parameters, oav_parameters) + for omega in parameters.snapshot_omegas_deg or []: + yield from _take_oav_snapshot(composite, omega) + + +def _setup_oav( + composite: OavSnapshotComposite, + parameters: WithSnapshot, + oav_parameters: OAVParameters, +): + yield from setup_general_oav_params(composite.oav, oav_parameters) + yield from bps.abs_set( + composite.oav.snapshot.directory, str(parameters.snapshot_directory) + ) + + +def _take_oav_snapshot(composite: OavSnapshotComposite, omega: float): + yield from bps.abs_set( + composite.smargon.omega, omega, group=OAV_SNAPSHOT_SETUP_SHOT + ) + time_now = datetime.now() + filename = f"{time_now.strftime('%H%M%S')}_oav_snapshot_{omega:.0f}" + yield from bps.abs_set( + composite.oav.snapshot.filename, filename, group=OAV_SNAPSHOT_SETUP_SHOT + ) + yield from bps.wait(group=OAV_SNAPSHOT_SETUP_SHOT) + yield from bps.trigger(composite.oav.snapshot, wait=True) + yield from bps.create(DocDescriptorNames.OAV_ROTATION_SNAPSHOT_TRIGGERED) + yield from bps.read(composite.oav.snapshot) + yield from bps.save() diff 
--git a/src/mx_bluesky/hyperion/experiment_plans/optimise_attenuation_plan.py b/src/mx_bluesky/hyperion/experiment_plans/optimise_attenuation_plan.py
new file mode 100644
index 000000000..66f75e4cc
--- /dev/null
+++ b/src/mx_bluesky/hyperion/experiment_plans/optimise_attenuation_plan.py
@@ -0,0 +1,463 @@
+import dataclasses
+from enum import Enum
+
+import bluesky.plan_stubs as bps
+import bluesky.preprocessors as bpp
+import numpy as np
+from blueapi.core import BlueskyContext
+from dodal.devices.attenuator import Attenuator
+from dodal.devices.xspress3.xspress3 import Xspress3
+from dodal.devices.zebra_controlled_shutter import ZebraShutter, ZebraShutterState
+
+from mx_bluesky.hyperion.log import LOGGER
+from mx_bluesky.hyperion.utils.context import device_composite_from_context
+
+
+class AttenuationOptimisationFailedException(Exception):
+    pass
+
+
+class Direction(Enum):
+    POSITIVE = "positive"
+    NEGATIVE = "negative"
+
+
+@dataclasses.dataclass
+class OptimizeAttenuationComposite:
+    """All devices which are directly or indirectly required by this plan"""
+
+    attenuator: Attenuator
+    sample_shutter: ZebraShutter
+    xspress3mini: Xspress3
+
+
+def create_devices(context: BlueskyContext) -> OptimizeAttenuationComposite:
+    return device_composite_from_context(context, OptimizeAttenuationComposite)
+
+
+def check_parameters(
+    target,
+    upper_count_limit,
+    lower_count_limit,
+    default_high_roi,
+    default_low_roi,
+    initial_transmission,
+    upper_transmission,
+    lower_transmission,
+):
+    if target < lower_count_limit or target > upper_count_limit:
+        raise (
+            ValueError(
+                f"Target {target} is outside of lower and upper bounds: {lower_count_limit} to {upper_count_limit}"
+            )
+        )
+
+    if default_high_roi < default_low_roi:
+        raise ValueError(
+            f"Upper roi {default_high_roi} must be greater than lower roi {default_low_roi}"
+        )
+
+    if upper_transmission < lower_transmission:
+        raise ValueError(
+            f"Upper transmission limit {upper_transmission} must be greater than lower transmission limit {lower_transmission}"
+        )
+
+    if not upper_transmission >= initial_transmission >= lower_transmission:
+        raise ValueError(
+            f"initial transmission {initial_transmission} is outside range {lower_transmission} - {upper_transmission}"
+        )
+
+
+def is_counts_within_target(total_count, lower_count_limit, upper_count_limit) -> bool:
+    if lower_count_limit <= total_count and total_count <= upper_count_limit:
+        return True
+    else:
+        return False
+
+
+def calculate_new_direction(direction: Direction, deadtime, deadtime_threshold):
+    if direction == Direction.POSITIVE:
+        if deadtime > deadtime_threshold:
+            direction = Direction.NEGATIVE
+            LOGGER.info(
+                "Found transmission to go above deadtime threshold. Reducing transmission..."
+            )
+    return direction
+
+
+def deadtime_calc_new_transmission(
+    direction: Direction,
+    transmission: float,
+    increment: float,
+    upper_transmission_limit: float,
+    lower_transmission_limit: float,
+) -> float:
+    """Calculate the new transmission value based on the current direction and increment. Raise error if transmission is too low.
+
+    Args:
+        direction (Direction):
+            If positive, increase transmission by a factor of the increment. If negative, divide it
+
+        transmission (float):
+            Current transmission value
+
+        increment (float):
+            Factor to multiply or divide transmission by
+
+        upper_transmission_limit (float):
+            Maximum allowed transmission, in order to protect sample.
+
+        lower_transmission_limit (float):
+            Minimum expected transmission. Raise an error if transmission goes lower.
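+
+    (Illustrative example: with an increment of 2, a transmission of 0.02 becomes
+    0.04 when the direction is positive, or 0.01 when it is negative, clamped to
+    the upper limit and raising an error if it falls below the lower limit.)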
+
+    Raises:
+        AttenuationOptimisationFailedException:
+            This error is thrown if the calculated transmission goes below the expected lower limit
+
+    Returns:
+        transmission (float): The new transmission value to use for the next iteration
+    """
+    if direction == Direction.POSITIVE:
+        transmission *= increment
+        if transmission > upper_transmission_limit:
+            transmission = upper_transmission_limit
+    else:
+        transmission /= increment
+        if transmission < lower_transmission_limit:
+            raise AttenuationOptimisationFailedException(
+                "Calculated transmission is below expected limit"
+            )
+    return transmission
+
+
+def do_device_optimise_iteration(
+    composite: OptimizeAttenuationComposite,
+    transmission,
+):
+    def close_shutter():
+        yield from bps.abs_set(
+            composite.sample_shutter, ZebraShutterState.CLOSE, wait=True
+        )
+
+    @bpp.finalize_decorator(close_shutter)
+    def open_and_run():
+        """Set transmission, set number of images on xspress3mini, arm xspress3mini"""
+        yield from bps.abs_set(
+            composite.attenuator, transmission, group="set_transmission"
+        )
+        yield from bps.abs_set(composite.xspress3mini.set_num_images, 1, wait=True)
+        yield from bps.abs_set(
+            composite.sample_shutter, ZebraShutterState.OPEN, wait=True
+        )
+        yield from bps.stage(composite.xspress3mini, wait=True)
+        yield from bps.unstage(composite.xspress3mini, wait=True)
+
+    yield from open_and_run()
+
+
+def is_deadtime_optimised(
+    deadtime: float,
+    deadtime_threshold: float,
+    transmission: float,
+    upper_transmission_limit: float,
+    direction: Direction,
+) -> bool:
+    if direction == Direction.POSITIVE:
+        if transmission == upper_transmission_limit:
+            LOGGER.warning(
+                f"Deadtime {deadtime} is above threshold {deadtime_threshold} at maximum transmission {upper_transmission_limit}. Using maximum transmission\
+                as optimised value."
+            )
+            return True
+    # Once direction is flipped and deadtime goes back below the threshold, we consider attenuation to be optimised.
+    else:
+        if deadtime <= deadtime_threshold:
+            return True
+    return False
+
+
+def deadtime_optimisation(
+    composite: OptimizeAttenuationComposite,
+    transmission: float,
+    increment: float,
+    deadtime_threshold: float,
+    max_cycles: int,
+    upper_transmission_limit: float,
+    lower_transmission_limit: float,
+):
+    """Optimises the attenuation for the Xspress3Mini based on the detector deadtime
+
+    Deadtime is the time after each event during which the detector cannot record another event. This loop adjusts the transmission of the attenuator
+    and checks the deadtime until the percentage deadtime is below the accepted threshold. To protect the sample, the transmission has a maximum value
+    given by the upper transmission limit.
+
+    Here we use the percentage deadtime - the percentage of time during which the detector is unable to process events.
+
+    This algorithm gradually increases the transmission until the percentage deadtime goes above the specified threshold. It then decreases
+    the transmission and stops once the deadtime falls back below the threshold. A smaller increment will provide a better optimised value, but take more
+    cycles to complete.
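+
+    (Illustrative example: starting at a transmission of 0.01 with an increment of 2,
+    the transmission is stepped through 0.02, 0.04, 0.08, ... until the measured
+    deadtime exceeds the threshold, after which it is repeatedly halved until the
+    deadtime is acceptable again.)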
+ + Args: + attenuator: (Attenuator) Ophyd device + + xspress3mini: (Xspress3Mini) Ophyd device + + sample_shutter: (ZebraShutter) Ophyd_async device for the fast shutter + + transmission: (float) + The initial transmission value to use for the optimising + + increment: (float) + The factor to increase / decrease the transmission by each iteration + + deadtime_threshold: (float) + The maximum acceptable percentage deadtime + + max_cycles: (int) + The maximum number of iterations before an error is thrown + + upper_transmission_limit (float): + Maximum allowed transmission, in order to protect sample. + + lower_transmission_limit (float): + Minimum expected transmission. Raise an error if transmission goes lower. + + Raises: + AttenuationOptimisationFailedException: + This error is thrown if the transmission goes below the expected value or the maximum cycles are reached + + Returns: + optimised_transmission: (float) + The final transmission value which produces an acceptable deadtime + """ + + direction = Direction.POSITIVE + LOGGER.info(f"Target deadtime is {deadtime_threshold}") + optimised_transmission: float = 0 + for cycle in range(0, max_cycles): + yield from do_device_optimise_iteration(composite, transmission) + + total_time = yield from bps.rd(composite.xspress3mini.channels[1].total_time) + reset_ticks = yield from bps.rd(composite.xspress3mini.channels[1].reset_ticks) + + LOGGER.info(f"Current total time = {total_time}") + LOGGER.info(f"Current reset ticks = {reset_ticks}") + deadtime = 0 + + """ + The reset ticks PV stops ticking while the detector is unable to process events, so the absolute difference between the total time and the + reset ticks time gives the deadtime in unit time. Divide by total time to get it as a percentage. + """ + + if total_time != reset_ticks: + deadtime = 1 - abs(total_time - reset_ticks) / (total_time) + + LOGGER.info(f"Deadtime is now at {deadtime}") + + # Check if new deadtime is OK + + if is_deadtime_optimised( + deadtime, + deadtime_threshold, + transmission, + upper_transmission_limit, + direction, + ): + optimised_transmission = transmission + break + + if cycle == max_cycles - 1: + raise AttenuationOptimisationFailedException( + f"Unable to optimise attenuation after maximum cycles.\ + Deadtime did not get lower than threshold: {deadtime_threshold} in maximum cycles {max_cycles}" + ) + + direction = calculate_new_direction(direction, deadtime, deadtime_threshold) + + transmission = deadtime_calc_new_transmission( + direction, + transmission, + increment, + upper_transmission_limit, + lower_transmission_limit, + ) + + return optimised_transmission + + +def total_counts_optimisation( + composite: OptimizeAttenuationComposite, + transmission: float, + low_roi: int, + high_roi: int, + lower_count_limit: float, + upper_count_limit: float, + target_count: float, + max_cycles: int, + upper_transmission_limit: float, + lower_transmission_limit: float, +): + """Optimises the attenuation for the Xspress3Mini based on the total counts + + This loop adjusts the transmission of the attenuator and checks the total counts of the detector until the total counts as in the acceptable range, + defined by the lower and upper limit. To protect the sample, the transmission has a maximum value of 10%. 
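+
+    (Illustrative example: with a target of 20000 counts, a cycle that measures
+    10000 counts at 1% transmission results in the next cycle using
+    (20000 / 10000) * 1% = 2% transmission, clamped to the transmission limits.)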
+ + Args: + attenuator: (Attenuator) Ophyd device + + xspress3mini: (Xspress3Mini) Ophyd device + + sample_shutter: (ZebraShutter) Ophyd_async device for the fast shutter + + transmission: (float) + The initial transmission value to use for the optimising + + low_roi: (float) + Lower region of interest at which to include in the counts + + high_roi: (float) + Upper region of interest at which to include in the counts + + lower_count_limit: (float) + The lowest acceptable value for count + + upper_count_limit: (float) + The highest acceptable value for count + + target_count: (int) + The ideal number of target counts - used to calculate the transmission for the subsequent iteration. + + max_cycles: (int) + The maximum number of iterations before an error is thrown + + upper_transmission_limit: (float) + The maximum allowed value for the transmission + + lower_transmission_limit: (float) + The minimum allowed value for the transmission + + Returns: + optimised_transmission: (float) + The final transmission value which produces an acceptable total_count value + """ + + LOGGER.info("Using total count optimisation") + optimised_transmission: float = 0 + for cycle in range(0, max_cycles): + LOGGER.info( + f"Setting transmission to {transmission} for attenuation optimisation cycle {cycle}" + ) + + yield from do_device_optimise_iteration(composite, transmission) + + data = np.array( + (yield from bps.rd(composite.xspress3mini.dt_corrected_latest_mca[1])) + ) + total_count = sum(data[int(low_roi) : int(high_roi)]) + LOGGER.info(f"Total count is {total_count}") + + if is_counts_within_target(total_count, lower_count_limit, upper_count_limit): + optimised_transmission = transmission + LOGGER.info( + f"Total count is within accepted limits: {lower_count_limit}, {total_count}, {upper_count_limit}" + ) + break + elif transmission == upper_transmission_limit: + LOGGER.warning( + f"Total count is not within limits: {lower_count_limit} <= {total_count} <= {upper_count_limit}\ + after using maximum transmission {upper_transmission_limit}. Continuing\ + with maximum transmission as optimised value..." 
+ ) + optimised_transmission = transmission + break + + else: + transmission = (target_count / (total_count)) * transmission + if transmission > upper_transmission_limit: + transmission = upper_transmission_limit + elif transmission < lower_transmission_limit: + raise AttenuationOptimisationFailedException( + f"Transmission has gone below lower threshold {lower_transmission_limit}" + ) + + if cycle == max_cycles - 1: + raise AttenuationOptimisationFailedException( + f"Unable to optimise attenuation after maximum cycles.\ + Total count is not within limits: {lower_count_limit} <= {total_count}\ + <= {upper_count_limit}" + ) + + return optimised_transmission + + +def optimise_attenuation_plan( + composite: OptimizeAttenuationComposite, + collection_time=1, # Comes from self.parameters.acquisitionTime in fluorescence_spectrum.py + optimisation_type="deadtime", + low_roi=100, + high_roi=2048, + upper_transmission_limit=0.1, + lower_transmission_limit=1.0e-6, + initial_transmission=0.1, + target_count=20000, + lower_count_limit=20000, + upper_count_limit=50000, + max_cycles=10, + increment=2, + deadtime_threshold=0.002, +): + check_parameters( + target_count, + upper_count_limit, + lower_count_limit, + high_roi, + low_roi, + initial_transmission, + upper_transmission_limit, + lower_transmission_limit, + ) + + yield from bps.abs_set( + composite.xspress3mini.acquire_time, collection_time, wait=True + ) # Don't necessarily need to wait here + optimised_transmission: float = 0 + # Do the attenuation optimisation using count threshold + if optimisation_type == "total_counts": + LOGGER.info( + f"Starting Xspress3Mini total counts optimisation routine \nOptimisation will be performed across ROI channels {low_roi} - {high_roi}" + ) + + optimised_transmission = yield from total_counts_optimisation( + composite, + initial_transmission, + low_roi, + high_roi, + lower_count_limit, + upper_count_limit, + target_count, + max_cycles, + upper_transmission_limit, + lower_transmission_limit, + ) + + elif optimisation_type == "deadtime": + LOGGER.info( + f"Starting Xspress3Mini deadtime optimisation routine \nOptimisation will be performed across ROI channels {low_roi} - {high_roi}" + ) + optimised_transmission = yield from deadtime_optimisation( + composite, + initial_transmission, + upper_transmission_limit, + lower_transmission_limit, + increment, + deadtime_threshold, + max_cycles, + ) + + yield from bps.abs_set( + composite.attenuator, + optimised_transmission, + group="set_transmission", + wait=True, + ) + + return optimised_transmission diff --git a/src/mx_bluesky/hyperion/experiment_plans/pin_centre_then_xray_centre_plan.py b/src/mx_bluesky/hyperion/experiment_plans/pin_centre_then_xray_centre_plan.py new file mode 100644 index 000000000..96e3aad29 --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/pin_centre_then_xray_centre_plan.py @@ -0,0 +1,104 @@ +from __future__ import annotations + +import json + +from blueapi.core import BlueskyContext, MsgGenerator +from dodal.devices.eiger import EigerDetector +from dodal.devices.oav.oav_parameters import OAV_CONFIG_JSON, OAVParameters + +from mx_bluesky.hyperion.device_setup_plans.utils import ( + start_preparing_data_collection_then_do_plan, +) +from mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan import ( + GridDetectThenXRayCentreComposite, + detect_grid_and_do_gridscan, +) +from mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan import ( + PinTipCentringComposite, + pin_tip_centre_plan, +) +from 
mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import (
+    ispyb_activation_wrapper,
+)
+from mx_bluesky.hyperion.log import LOGGER
+from mx_bluesky.hyperion.parameters.constants import CONST
+from mx_bluesky.hyperion.parameters.gridscan import (
+    GridScanWithEdgeDetect,
+    PinTipCentreThenXrayCentre,
+)
+from mx_bluesky.hyperion.utils.context import device_composite_from_context
+
+
+def create_devices(context: BlueskyContext) -> GridDetectThenXRayCentreComposite:
+    """
+    GridDetectThenXRayCentreComposite contains all the devices we need, reuse that.
+    """
+    return device_composite_from_context(context, GridDetectThenXRayCentreComposite)
+
+
+def create_parameters_for_grid_detection(
+    pin_centre_parameters: PinTipCentreThenXrayCentre,
+) -> GridScanWithEdgeDetect:
+    params_json = json.loads(pin_centre_parameters.json())
+    del params_json["tip_offset_um"]
+    grid_detect_and_xray_centre = GridScanWithEdgeDetect(**params_json)
+    LOGGER.info(
+        f"Parameters for grid detect and xray centre: {grid_detect_and_xray_centre.json(indent=2)}"
+    )
+    return grid_detect_and_xray_centre
+
+
+def pin_centre_then_xray_centre_plan(
+    composite: GridDetectThenXRayCentreComposite,
+    parameters: PinTipCentreThenXrayCentre,
+    oav_config_file: str = OAV_CONFIG_JSON,
+):
+    """Plan that performs a pin tip centre followed by an xray centre to completely
+    centre the sample"""
+    oav_config_file = parameters.oav_centring_file
+
+    pin_tip_centring_composite = PinTipCentringComposite(
+        oav=composite.oav,
+        smargon=composite.smargon,
+        backlight=composite.backlight,
+        pin_tip_detection=composite.pin_tip_detection,
+    )
+
+    def _pin_centre_then_xray_centre_plan():
+        yield from pin_tip_centre_plan(
+            pin_tip_centring_composite,
+            parameters.tip_offset_um,
+            oav_config_file,
+        )
+
+        grid_detect_params = create_parameters_for_grid_detection(parameters)
+
+        oav_params = OAVParameters("xrayCentring", oav_config_file)
+
+        yield from detect_grid_and_do_gridscan(
+            composite,
+            grid_detect_params,
+            oav_params,
+        )
+
+    yield from ispyb_activation_wrapper(_pin_centre_then_xray_centre_plan(), parameters)
+
+
+def pin_tip_centre_then_xray_centre(
+    composite: GridDetectThenXRayCentreComposite,
+    parameters: PinTipCentreThenXrayCentre,
+    oav_config_file: str = OAV_CONFIG_JSON,
+) -> MsgGenerator:
+    """Starts preparing for collection then performs the pin tip centre and xray centre"""
+
+    eiger: EigerDetector = composite.eiger
+
+    eiger.set_detector_parameters(parameters.detector_params)
+
+    return start_preparing_data_collection_then_do_plan(
+        eiger,
+        composite.detector_motion,
+        parameters.detector_params.detector_distance,
+        pin_centre_then_xray_centre_plan(composite, parameters, oav_config_file),
+        group=CONST.WAIT.GRID_READY_FOR_DC,
+    )
diff --git a/src/mx_bluesky/hyperion/experiment_plans/pin_tip_centring_plan.py b/src/mx_bluesky/hyperion/experiment_plans/pin_tip_centring_plan.py
new file mode 100644
index 000000000..b45296a70
--- /dev/null
+++ b/src/mx_bluesky/hyperion/experiment_plans/pin_tip_centring_plan.py
@@ -0,0 +1,164 @@
+import dataclasses
+from collections.abc import Generator
+
+import bluesky.plan_stubs as bps
+from blueapi.core import BlueskyContext
+from bluesky.utils import Msg
+from dodal.devices.backlight import Backlight
+from dodal.devices.oav.oav_detector import OAV
+from dodal.devices.oav.oav_parameters import OAV_CONFIG_JSON, OAVParameters
+from dodal.devices.oav.pin_image_recognition import PinTipDetection
+from dodal.devices.oav.utils import (
+    Pixel,
+
get_move_required_so_that_beam_is_at_pixel, + wait_for_tip_to_be_found, +) +from dodal.devices.smargon import Smargon + +from mx_bluesky.hyperion.device_setup_plans.setup_oav import pre_centring_setup_oav +from mx_bluesky.hyperion.device_setup_plans.smargon import ( + move_smargon_warn_on_out_of_range, +) +from mx_bluesky.hyperion.exceptions import WarningException +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.utils.context import device_composite_from_context + +DEFAULT_STEP_SIZE = 0.5 + + +@dataclasses.dataclass +class PinTipCentringComposite: + """All devices which are directly or indirectly required by this plan""" + + backlight: Backlight + oav: OAV + smargon: Smargon + pin_tip_detection: PinTipDetection + + +def create_devices(context: BlueskyContext) -> PinTipCentringComposite: + return device_composite_from_context(context, PinTipCentringComposite) + + +def trigger_and_return_pin_tip( + pin_tip: PinTipDetection, +) -> Generator[Msg, None, Pixel]: + yield from bps.trigger(pin_tip, wait=True) + tip_x_y_px = yield from bps.rd(pin_tip.triggered_tip) + LOGGER.info(f"Pin tip found at {tip_x_y_px}") + return tip_x_y_px # type: ignore + + +def move_pin_into_view( + pin_tip_device: PinTipDetection, + smargon: Smargon, + step_size_mm: float = DEFAULT_STEP_SIZE, + max_steps: int = 2, +) -> Generator[Msg, None, Pixel]: + """Attempt to move the pin into view and return the tip location in pixels if found. + The gonio x is moved in a number of discrete steps to find the pin. If the move + would take it past its limit, it moves to the limit instead. + + Args: + pin_tip_device (PinTipDetection): The device being used to detect the pin + smargon (Smargon): The gonio to move the tip + step_size (float, optional): Distance to move the gonio (in mm) for each + step of the search. Defaults to 0.5. + max_steps (int, optional): The number of steps to search with. Defaults to 2. 
+ + Raises: + WarningException: Error if the pin tip is never found + + Returns: + Tuple[int, int]: The location of the pin tip in pixels + """ + + def pin_tip_valid(pin_x: float): + return pin_x != 0 and pin_x != pin_tip_device.INVALID_POSITION[0] + + for _ in range(max_steps): + tip_x_px, tip_y_px = yield from trigger_and_return_pin_tip(pin_tip_device) + + if pin_tip_valid(tip_x_px): + return (tip_x_px, tip_y_px) + + if tip_x_px == 0: + # Pin is off in the -ve direction + step_size_mm = -step_size_mm + + smargon_x = yield from bps.rd(smargon.x.user_readback) + ideal_move_to_find_pin = float(smargon_x) + step_size_mm + high_limit = yield from bps.rd(smargon.x.high_limit_travel) + low_limit = yield from bps.rd(smargon.x.low_limit_travel) + move_within_limits = max(min(ideal_move_to_find_pin, high_limit), low_limit) + if move_within_limits != ideal_move_to_find_pin: + LOGGER.warning( + f"Pin tip is off screen, and moving {step_size_mm} mm would cross limits, " + f"moving to {move_within_limits} instead" + ) + yield from bps.mv(smargon.x, move_within_limits) + + # Some time for the view to settle after the move + yield from bps.sleep(CONST.HARDWARE.OAV_REFRESH_DELAY) + + tip_x_px, tip_y_px = yield from trigger_and_return_pin_tip(pin_tip_device) + + if not pin_tip_valid(tip_x_px): + raise WarningException( + "Pin tip centring failed - pin too long/short/bent and out of range" + ) + else: + return (tip_x_px, tip_y_px) + + +def pin_tip_centre_plan( + composite: PinTipCentringComposite, + tip_offset_microns: float, + oav_config_file: str = OAV_CONFIG_JSON, +): + """Finds the tip of the pin and moves to roughly the centre based on this tip. Does + this at both the current omega angle and +90 deg from this angle so as to get a + centre in 3D. + + Args: + tip_offset_microns (float): The x offset from the tip where the centre is assumed + to be. 
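+        composite (PinTipCentringComposite): The devices used for the pin tip centring.
+        oav_config_file (str): The OAV configuration file used to set up the camera
+            for pin tip centring.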
+ """ + oav: OAV = composite.oav + smargon: Smargon = composite.smargon + oav_params = OAVParameters("pinTipCentring", oav_config_file) + + pin_tip_setup = composite.pin_tip_detection + pin_tip_detect = composite.pin_tip_detection + + assert oav.parameters.micronsPerXPixel is not None + tip_offset_px = int(tip_offset_microns / oav.parameters.micronsPerXPixel) + + def offset_and_move(tip: Pixel): + pixel_to_move_to = (tip[0] + tip_offset_px, tip[1]) + position_mm = yield from get_move_required_so_that_beam_is_at_pixel( + smargon, pixel_to_move_to, oav.parameters + ) + LOGGER.info(f"Tip centring moving to : {position_mm}") + yield from move_smargon_warn_on_out_of_range(smargon, position_mm) + + LOGGER.info(f"Tip offset in pixels: {tip_offset_px}") + + # need to wait for the OAV image to update + # See #673 for improvements + yield from bps.sleep(0.3) + + yield from pre_centring_setup_oav(oav, oav_params, pin_tip_setup) + + tip = yield from move_pin_into_view(pin_tip_detect, smargon) + yield from offset_and_move(tip) + + yield from bps.mvr(smargon.omega, 90) + + # need to wait for the OAV image to update + # See #673 for improvements + yield from bps.sleep(0.3) + + tip = yield from wait_for_tip_to_be_found(pin_tip_detect) + yield from offset_and_move(tip) diff --git a/src/mx_bluesky/hyperion/experiment_plans/robot_load_then_centre_plan.py b/src/mx_bluesky/hyperion/experiment_plans/robot_load_then_centre_plan.py new file mode 100644 index 000000000..1a45cb74c --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/robot_load_then_centre_plan.py @@ -0,0 +1,275 @@ +from __future__ import annotations + +import dataclasses +from datetime import datetime +from pathlib import Path +from typing import cast + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +from blueapi.core import BlueskyContext, MsgGenerator +from dodal.devices.aperturescatterguard import AperturePosition, ApertureScatterguard +from dodal.devices.attenuator import Attenuator +from dodal.devices.backlight import Backlight +from dodal.devices.dcm import DCM +from dodal.devices.detector.detector_motion import DetectorMotion +from dodal.devices.eiger import EigerDetector +from dodal.devices.fast_grid_scan import PandAFastGridScan, ZebraFastGridScan +from dodal.devices.flux import Flux +from dodal.devices.focusing_mirror import FocusingMirrorWithStripes, VFMMirrorVoltages +from dodal.devices.motors import XYZPositioner +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.oav.pin_image_recognition import PinTipDetection +from dodal.devices.robot import BartRobot, SampleLocation +from dodal.devices.s4_slit_gaps import S4SlitGaps +from dodal.devices.smargon import Smargon, StubPosition +from dodal.devices.synchrotron import Synchrotron +from dodal.devices.thawer import Thawer +from dodal.devices.undulator import Undulator +from dodal.devices.undulator_dcm import UndulatorDCM +from dodal.devices.webcam import Webcam +from dodal.devices.xbpm_feedback import XBPMFeedback +from dodal.devices.zebra import Zebra +from dodal.devices.zocalo import ZocaloResults +from dodal.plans.motor_util_plans import MoveTooLarge, home_and_reset_wrapper +from ophyd_async.panda import HDFPanda + +from mx_bluesky.hyperion.device_setup_plans.utils import ( + start_preparing_data_collection_then_do_plan, +) +from mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan import ( + GridDetectThenXRayCentreComposite, +) +from mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan import ( + 
pin_centre_then_xray_centre_plan, +) +from mx_bluesky.hyperion.experiment_plans.set_energy_plan import ( + SetEnergyComposite, + read_energy, + set_energy_plan, +) +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import RobotLoadThenCentre + + +@dataclasses.dataclass +class RobotLoadThenCentreComposite: + # common fields + xbpm_feedback: XBPMFeedback + attenuator: Attenuator + + # GridDetectThenXRayCentreComposite fields + aperture_scatterguard: ApertureScatterguard + backlight: Backlight + detector_motion: DetectorMotion + eiger: EigerDetector + zebra_fast_grid_scan: ZebraFastGridScan + flux: Flux + oav: OAV + pin_tip_detection: PinTipDetection + smargon: Smargon + synchrotron: Synchrotron + s4_slit_gaps: S4SlitGaps + undulator: Undulator + zebra: Zebra + zocalo: ZocaloResults + panda: HDFPanda + panda_fast_grid_scan: PandAFastGridScan + thawer: Thawer + + # SetEnergyComposite fields + vfm: FocusingMirrorWithStripes + vfm_mirror_voltages: VFMMirrorVoltages + dcm: DCM + undulator_dcm: UndulatorDCM + + # RobotLoad fields + robot: BartRobot + webcam: Webcam + lower_gonio: XYZPositioner + + +def create_devices(context: BlueskyContext) -> RobotLoadThenCentreComposite: + from mx_bluesky.hyperion.utils.context import device_composite_from_context + + return device_composite_from_context(context, RobotLoadThenCentreComposite) + + +def wait_for_smargon_not_disabled(smargon: Smargon, timeout=60): + """Waits for the smargon disabled flag to go low. The robot hardware is responsible + for setting this to low when it is safe to move. It does this through a physical + connection between the robot and the smargon. + """ + LOGGER.info("Waiting for smargon enabled") + SLEEP_PER_CHECK = 0.1 + times_to_check = int(timeout / SLEEP_PER_CHECK) + for _ in range(times_to_check): + smargon_disabled = yield from bps.rd(smargon.disabled) + if not smargon_disabled: + LOGGER.info("Smargon now enabled") + return + yield from bps.sleep(SLEEP_PER_CHECK) + raise TimeoutError( + "Timed out waiting for smargon to become enabled after robot load" + ) + + +def take_robot_snapshots(oav: OAV, webcam: Webcam, directory: Path): + time_now = datetime.now() + snapshot_format = f"{time_now.strftime('%H%M%S')}_{{device}}_after_load" + for device in [oav.snapshot, webcam]: + yield from bps.abs_set( + device.filename, snapshot_format.format(device=device.name) + ) + yield from bps.abs_set(device.directory, str(directory)) + yield from bps.trigger(device, group="snapshots") + yield from bps.wait("snapshots") + + +def prepare_for_robot_load(composite: RobotLoadThenCentreComposite): + yield from bps.abs_set( + composite.aperture_scatterguard, + AperturePosition.ROBOT_LOAD, + group="prepare_robot_load", + ) + + yield from bps.mv(composite.smargon.stub_offsets, StubPosition.RESET_TO_ROBOT_LOAD) + + # fmt: off + yield from bps.mv(composite.smargon.x, 0, + composite.smargon.y, 0, + composite.smargon.z, 0, + composite.smargon.omega, 0, + composite.smargon.chi, 0, + composite.smargon.phi, 0) + # fmt: on + + yield from bps.wait("prepare_robot_load") + + +def do_robot_load( + composite: RobotLoadThenCentreComposite, + sample_location: SampleLocation, + demand_energy_ev: float | None, + thawing_time: float, +): + yield from bps.abs_set( + composite.robot, + sample_location, + group="robot_load", + ) + + if demand_energy_ev: + yield from set_energy_plan( + demand_energy_ev / 1000, + cast(SetEnergyComposite, composite), + ) + + yield from 
bps.wait("robot_load") + + yield from bps.abs_set( + composite.thawer.thaw_for_time_s, thawing_time, group="thawing_finished" + ) + yield from wait_for_smargon_not_disabled(composite.smargon) + + +def raise_exception_if_moved_out_of_cryojet(exception): + yield from bps.null() + if isinstance(exception, MoveTooLarge): + raise Exception( + f"Moving {exception.axis} back to {exception.position} after \ + robot load would move it out of the cryojet. The max safe \ + distance is {exception.maximum_move}" + ) + + +def robot_load_then_centre_plan( + composite: RobotLoadThenCentreComposite, + params: RobotLoadThenCentre, +): + yield from prepare_for_robot_load(composite) + + @bpp.run_decorator( + md={ + "subplan_name": CONST.PLAN.ROBOT_LOAD, + "metadata": { + "visit_path": params.ispyb_params.visit_path, + "sample_id": params.sample_id, + "sample_puck": params.sample_puck, + "sample_pin": params.sample_pin, + }, + "activate_callbacks": [ + "RobotLoadISPyBCallback", + ], + } + ) + def robot_load_and_snapshots(): + # TODO: get these from one source of truth #1347 + assert params.sample_puck is not None + assert params.sample_pin is not None + + robot_load_plan = do_robot_load( + composite, + SampleLocation(params.sample_puck, params.sample_pin), + params.demand_energy_ev, + params.thawing_time, + ) + + # The lower gonio must be in the correct position for the robot load and we + # want to put it back afterwards. Note we don't wait the robot is interlocked + # to the lower gonio and the move is quicker than the robot takes to get to the + # load position. + yield from bpp.contingency_wrapper( + home_and_reset_wrapper( + robot_load_plan, + composite.lower_gonio, + BartRobot.LOAD_TOLERANCE_MM, + CONST.HARDWARE.CRYOJET_MARGIN_MM, + "lower_gonio", + wait_for_all=False, + ), + except_plan=raise_exception_if_moved_out_of_cryojet, + ) + + yield from take_robot_snapshots( + composite.oav, composite.webcam, params.snapshot_directory + ) + + yield from bps.create(name=CONST.DESCRIPTORS.ROBOT_LOAD) + yield from bps.read(composite.robot.barcode) + yield from bps.read(composite.oav.snapshot) + yield from bps.read(composite.webcam) + yield from bps.save() + + yield from bps.wait("reset-lower_gonio") + + yield from robot_load_and_snapshots() + + yield from pin_centre_then_xray_centre_plan( + cast(GridDetectThenXRayCentreComposite, composite), + params.pin_centre_then_xray_centre_params(), + ) + + +def robot_load_then_centre( + composite: RobotLoadThenCentreComposite, + parameters: RobotLoadThenCentre, +) -> MsgGenerator: + eiger: EigerDetector = composite.eiger + + detector_params = parameters.detector_params + if not detector_params.expected_energy_ev: + actual_energy_ev = 1000 * ( + yield from read_energy(cast(SetEnergyComposite, composite)) + ) + detector_params.expected_energy_ev = actual_energy_ev + eiger.set_detector_parameters(detector_params) + + yield from start_preparing_data_collection_then_do_plan( + eiger, + composite.detector_motion, + parameters.detector_distance_mm, + robot_load_then_centre_plan(composite, parameters), + group=CONST.WAIT.GRID_READY_FOR_DC, + ) diff --git a/src/mx_bluesky/hyperion/experiment_plans/rotation_scan_plan.py b/src/mx_bluesky/hyperion/experiment_plans/rotation_scan_plan.py new file mode 100644 index 000000000..5df5ad7fc --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/rotation_scan_plan.py @@ -0,0 +1,424 @@ +from __future__ import annotations + +import dataclasses + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +from blueapi.core import 
BlueskyContext, MsgGenerator +from dodal.devices.aperturescatterguard import ApertureScatterguard +from dodal.devices.attenuator import Attenuator +from dodal.devices.backlight import Backlight +from dodal.devices.dcm import DCM +from dodal.devices.detector.detector_motion import DetectorMotion +from dodal.devices.eiger import EigerDetector +from dodal.devices.flux import Flux +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.robot import BartRobot +from dodal.devices.s4_slit_gaps import S4SlitGaps +from dodal.devices.smargon import Smargon +from dodal.devices.synchrotron import Synchrotron +from dodal.devices.undulator import Undulator +from dodal.devices.zebra import RotationDirection, Zebra +from dodal.plans.check_topup import check_topup_and_wait_if_necessary + +from mx_bluesky.hyperion.device_setup_plans.manipulate_sample import ( + begin_sample_environment_setup, + cleanup_sample_environment, + move_phi_chi_omega, + move_x_y_z, + setup_sample_environment, +) +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, + read_hardware_for_zocalo, + read_hardware_pre_collection, +) +from mx_bluesky.hyperion.device_setup_plans.setup_zebra import ( + arm_zebra, + disarm_zebra, + make_trigger_safe, + setup_zebra_for_rotation, +) +from mx_bluesky.hyperion.experiment_plans.oav_snapshot_plan import ( + OavSnapshotComposite, + oav_snapshot_plan, + setup_oav_snapshot_plan, +) +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import ( + MultiRotationScan, + RotationScan, +) +from mx_bluesky.hyperion.utils.context import device_composite_from_context + + +@dataclasses.dataclass +class RotationScanComposite(OavSnapshotComposite): + """All devices which are directly or indirectly required by this plan""" + + aperture_scatterguard: ApertureScatterguard + attenuator: Attenuator + backlight: Backlight + dcm: DCM + detector_motion: DetectorMotion + eiger: EigerDetector + flux: Flux + robot: BartRobot + smargon: Smargon + undulator: Undulator + synchrotron: Synchrotron + s4_slit_gaps: S4SlitGaps + zebra: Zebra + oav: OAV + + +def create_devices(context: BlueskyContext) -> RotationScanComposite: + """Ensures necessary devices have been instantiated""" + + return device_composite_from_context(context, RotationScanComposite) + + +DEFAULT_DIRECTION = RotationDirection.NEGATIVE +DEFAULT_MAX_VELOCITY = 120 +# Use a slightly larger time to acceleration than EPICS as it's better to be cautious +ACCELERATION_MARGIN = 1.5 + + +@dataclasses.dataclass +class RotationMotionProfile: + start_scan_deg: float + start_motion_deg: float + scan_width_deg: float + shutter_time_s: float + direction: RotationDirection + speed_for_rotation_deg_s: float + acceleration_offset_deg: float + shutter_opening_deg: float + total_exposure_s: float + distance_to_move_deg: float + max_velocity_deg_s: float + + +def calculate_motion_profile( + params: RotationScan, + motor_time_to_speed_s: float, + max_velocity_deg_s: float, +) -> RotationMotionProfile: + """Calculates the various numbers needed for motions in the rotation scan. + Rotates through "scan width" plus twice an "offset" to take into account + acceleration at the start and deceleration at the end, plus the number of extra + degrees of rotation needed to make sure the fast shutter has fully opened before the + detector trigger is sent. 
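+    (Illustrative example: a 0.1 deg image width with a 0.01 s exposure gives a
+    rotation speed of 0.1 / 0.01 = 10 deg/s, and a 0.06 s shutter opening time then
+    adds 10 * 0.06 = 0.6 deg of rotation before the first detector trigger.)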
+ See https://github.com/DiamondLightSource/hyperion/wiki/rotation-scan-geometry + for a simple pictorial explanation.""" + + direction = params.rotation_direction.multiplier + num_images = params.num_images + shutter_time_s = params.shutter_opening_time_s + image_width_deg = params.rotation_increment_deg + exposure_time_s = params.exposure_time_s + motor_time_to_speed_s *= ACCELERATION_MARGIN + start_scan_deg = params.omega_start_deg + + LOGGER.info("Calculating rotation scan motion profile:") + LOGGER.info( + f"{num_images=}, {shutter_time_s=}, {image_width_deg=}, {exposure_time_s=}, {direction=}" + ) + + scan_width_deg = num_images * params.rotation_increment_deg + LOGGER.info(f"{scan_width_deg=} = {num_images=} * {params.rotation_increment_deg=}") + + speed_for_rotation_deg_s = image_width_deg / exposure_time_s + LOGGER.info("speed_for_rotation_deg_s = image_width_deg / exposure_time_s") + LOGGER.info( + f"{speed_for_rotation_deg_s=} = {image_width_deg=} / {exposure_time_s=}" + ) + + acceleration_offset_deg = motor_time_to_speed_s * speed_for_rotation_deg_s + LOGGER.info( + f"{acceleration_offset_deg=} = {motor_time_to_speed_s=} * {speed_for_rotation_deg_s=}" + ) + + start_motion_deg = start_scan_deg - (acceleration_offset_deg * direction) + LOGGER.info( + f"{start_motion_deg=} = {start_scan_deg=} - ({acceleration_offset_deg=} * {direction=})" + ) + + shutter_opening_deg = speed_for_rotation_deg_s * shutter_time_s + LOGGER.info( + f"{shutter_opening_deg=} = {speed_for_rotation_deg_s=} * {shutter_time_s=}" + ) + + shutter_opening_deg = speed_for_rotation_deg_s * shutter_time_s + LOGGER.info( + f"{shutter_opening_deg=} = {speed_for_rotation_deg_s=} * {shutter_time_s=}" + ) + + total_exposure_s = num_images * exposure_time_s + LOGGER.info(f"{total_exposure_s=} = {num_images=} * {exposure_time_s=}") + + distance_to_move_deg = ( + scan_width_deg + shutter_opening_deg + acceleration_offset_deg * 2 + ) * direction + LOGGER.info( + f"{distance_to_move_deg=} = ({scan_width_deg=} + {shutter_opening_deg=} + {acceleration_offset_deg=} * 2) * {direction=})" + ) + + return RotationMotionProfile( + start_scan_deg=start_scan_deg, + start_motion_deg=start_motion_deg, + scan_width_deg=scan_width_deg, + shutter_time_s=shutter_time_s, + direction=params.rotation_direction, + speed_for_rotation_deg_s=speed_for_rotation_deg_s, + acceleration_offset_deg=acceleration_offset_deg, + shutter_opening_deg=shutter_opening_deg, + total_exposure_s=total_exposure_s, + distance_to_move_deg=distance_to_move_deg, + max_velocity_deg_s=max_velocity_deg_s, + ) + + +def rotation_scan_plan( + composite: RotationScanComposite, + params: RotationScan, + motion_values: RotationMotionProfile, +): + """A stub plan to collect diffraction images from a sample continuously rotating + about a fixed axis - for now this axis is limited to omega. 
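+    The plan moves omega to the start position at maximum velocity, configures and
+    arms the Zebra for the rotation, reads back hardware for the ISPyB deposition,
+    and then rotates through the calculated distance at the scan speed.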
+ Needs additional setup of the sample environment and a wrapper to clean up.""" + + @bpp.set_run_key_decorator(CONST.PLAN.ROTATION_MAIN) + @bpp.run_decorator( + md={ + "subplan_name": CONST.PLAN.ROTATION_MAIN, + "scan_points": [params.scan_points], + } + ) + def _rotation_scan_plan( + motion_values: RotationMotionProfile, + composite: RotationScanComposite, + ): + axis = composite.smargon.omega + + # can move to start as fast as possible + yield from bps.abs_set( + axis.velocity, motion_values.max_velocity_deg_s, wait=True + ) + LOGGER.info(f"moving omega to beginning, {motion_values.start_scan_deg=}") + yield from bps.abs_set( + axis, + motion_values.start_motion_deg, + group="move_to_rotation_start", + wait=True, + ) + + yield from setup_zebra_for_rotation( + composite.zebra, + start_angle=motion_values.start_scan_deg, + scan_width=motion_values.scan_width_deg, + direction=motion_values.direction, + shutter_opening_deg=motion_values.shutter_opening_deg, + shutter_opening_s=motion_values.shutter_time_s, + group="setup_zebra", + wait=True, + ) + + yield from setup_sample_environment( + composite.aperture_scatterguard, + params.selected_aperture, + composite.backlight, + ) + + LOGGER.info("Wait for any previous moves...") + # wait for all the setup tasks at once + yield from bps.wait(CONST.WAIT.MOVE_GONIO_TO_START) + yield from bps.wait("setup_senv") + yield from bps.wait("move_to_rotation_start") + + # get some information for the ispyb deposition and trigger the callback + yield from read_hardware_for_zocalo(composite.eiger) + + yield from read_hardware_pre_collection( + composite.undulator, + composite.synchrotron, + composite.s4_slit_gaps, + composite.robot, + composite.smargon, + ) + + # Get ready for the actual scan + yield from bps.abs_set( + axis.velocity, motion_values.speed_for_rotation_deg_s, wait=True + ) + + yield from bps.wait("setup_zebra") + yield from arm_zebra(composite.zebra) + + # Check topup gate + yield from check_topup_and_wait_if_necessary( + composite.synchrotron, + motion_values.total_exposure_s, + ops_time=10.0, # Additional time to account for rotation, is s + ) # See #https://github.com/DiamondLightSource/hyperion/issues/932 + + LOGGER.info("Executing rotation scan") + yield from bps.rel_set(axis, motion_values.distance_to_move_deg, wait=True) + + yield from read_hardware_during_collection( + composite.aperture_scatterguard, + composite.attenuator, + composite.flux, + composite.dcm, + composite.eiger, + ) + + yield from _rotation_scan_plan(motion_values, composite) + + +def _cleanup_plan(composite: RotationScanComposite, **kwargs): + LOGGER.info("Cleaning up after rotation scan") + max_vel = yield from bps.rd(composite.smargon.omega.max_velocity) + yield from cleanup_sample_environment(composite.detector_motion, group="cleanup") + yield from bps.abs_set(composite.smargon.omega.velocity, max_vel, group="cleanup") + yield from make_trigger_safe(composite.zebra, group="cleanup") + yield from bpp.finalize_wrapper(disarm_zebra(composite.zebra), bps.wait("cleanup")) + + +def _move_and_rotation( + composite: RotationScanComposite, + params: RotationScan, + oav_params: OAVParameters, +): + motor_time_to_speed = yield from bps.rd(composite.smargon.omega.acceleration_time) + max_vel = yield from bps.rd(composite.smargon.omega.max_velocity) + motion_values = calculate_motion_profile(params, motor_time_to_speed, max_vel) + + def _div_by_1000_if_not_none(num: float | None): + return num / 1000 if num else num + + LOGGER.info("moving to position (if specified)") + yield from 
move_x_y_z( + composite.smargon, + _div_by_1000_if_not_none(params.x_start_um), + _div_by_1000_if_not_none(params.y_start_um), + _div_by_1000_if_not_none(params.z_start_um), + group=CONST.WAIT.MOVE_GONIO_TO_START, + ) + yield from move_phi_chi_omega( + composite.smargon, + params.phi_start_deg, + params.chi_start_deg, + group=CONST.WAIT.MOVE_GONIO_TO_START, + ) + if params.take_snapshots: + yield from bps.wait(CONST.WAIT.MOVE_GONIO_TO_START) + yield from setup_oav_snapshot_plan( + composite, params, motion_values.max_velocity_deg_s + ) + yield from oav_snapshot_plan(composite, params, oav_params) + yield from rotation_scan_plan( + composite, + params, + motion_values, + ) + + +def rotation_scan( + composite: RotationScanComposite, + parameters: RotationScan, + oav_params: OAVParameters | None = None, +) -> MsgGenerator: + if not oav_params: + oav_params = OAVParameters(context="xrayCentring") + + @bpp.set_run_key_decorator("rotation_scan") + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.ROTATION_OUTER, + CONST.TRIGGER.ZOCALO: CONST.PLAN.ROTATION_MAIN, + "hyperion_parameters": parameters.json(), + "activate_callbacks": [ + "RotationISPyBCallback", + "RotationNexusFileCallback", + ], + } + ) + def rotation_scan_plan_with_stage_and_cleanup( + params: RotationScan, + ): + eiger: EigerDetector = composite.eiger + eiger.set_detector_parameters(params.detector_params) + + @bpp.stage_decorator([eiger]) + @bpp.finalize_decorator(lambda: _cleanup_plan(composite)) + def rotation_with_cleanup_and_stage(params: RotationScan): + LOGGER.info("setting up sample environment...") + yield from begin_sample_environment_setup( + composite.detector_motion, + composite.attenuator, + params.transmission_frac, + params.detector_params.detector_distance, + ) + + yield from _move_and_rotation(composite, params, oav_params) + + LOGGER.info("setting up and staging eiger...") + yield from rotation_with_cleanup_and_stage(params) + + yield from rotation_scan_plan_with_stage_and_cleanup(parameters) + + +def multi_rotation_scan( + composite: RotationScanComposite, + parameters: MultiRotationScan, + oav_params: OAVParameters | None = None, +) -> MsgGenerator: + if not oav_params: + oav_params = OAVParameters(context="xrayCentring") + eiger: EigerDetector = composite.eiger + eiger.set_detector_parameters(parameters.detector_params) + LOGGER.info("setting up sample environment...") + yield from begin_sample_environment_setup( + composite.detector_motion, + composite.attenuator, + parameters.transmission_frac, + parameters.detector_params.detector_distance, + ) + + @bpp.set_run_key_decorator("multi_rotation_scan") + @bpp.run_decorator( + md={ + "subplan_name": CONST.PLAN.ROTATION_MULTI, + "full_num_of_images": parameters.num_images, + "meta_data_run_number": parameters.detector_params.run_number, + "activate_callbacks": [ + "RotationISPyBCallback", + "RotationNexusFileCallback", + ], + } + ) + @bpp.stage_decorator([eiger]) + @bpp.finalize_decorator(lambda: _cleanup_plan(composite)) + def _multi_rotation_scan(): + for single_scan in parameters.single_rotation_scans: + + @bpp.set_run_key_decorator("rotation_scan") + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.ROTATION_OUTER, + CONST.TRIGGER.ZOCALO: CONST.PLAN.ROTATION_MAIN, + "hyperion_parameters": single_scan.json(), + } + ) + def rotation_scan_core( + params: RotationScan, + ): + yield from _move_and_rotation(composite, params, oav_params) + + yield from 
rotation_scan_core(single_scan) + + LOGGER.info("setting up and staging eiger...") + yield from _multi_rotation_scan() diff --git a/src/mx_bluesky/hyperion/experiment_plans/set_energy_plan.py b/src/mx_bluesky/hyperion/experiment_plans/set_energy_plan.py new file mode 100644 index 000000000..7cdaeaf8f --- /dev/null +++ b/src/mx_bluesky/hyperion/experiment_plans/set_energy_plan.py @@ -0,0 +1,68 @@ +"""Plan that comprises: +* Disable feedback +* Set undulator energy to the requested amount +* Adjust DCM and mirrors for the new energy +* reenable feedback +""" + +import dataclasses +from collections.abc import Generator +from typing import Any + +from bluesky import plan_stubs as bps +from bluesky.utils import Msg +from dodal.devices.attenuator import Attenuator +from dodal.devices.dcm import DCM +from dodal.devices.focusing_mirror import FocusingMirrorWithStripes, VFMMirrorVoltages +from dodal.devices.undulator_dcm import UndulatorDCM +from dodal.devices.xbpm_feedback import XBPMFeedback + +from mx_bluesky.hyperion.device_setup_plans import dcm_pitch_roll_mirror_adjuster +from mx_bluesky.hyperion.device_setup_plans.xbpm_feedback import ( + transmission_and_xbpm_feedback_for_collection_wrapper, +) + +DESIRED_TRANSMISSION_FRACTION = 0.1 + +UNDULATOR_GROUP = "UNDULATOR_GROUP" + + +@dataclasses.dataclass +class SetEnergyComposite: + vfm: FocusingMirrorWithStripes + vfm_mirror_voltages: VFMMirrorVoltages + dcm: DCM + undulator_dcm: UndulatorDCM + xbpm_feedback: XBPMFeedback + attenuator: Attenuator + + +def _set_energy_plan( + energy_kev, + composite: SetEnergyComposite, +): + yield from bps.abs_set(composite.undulator_dcm, energy_kev, group=UNDULATOR_GROUP) + yield from dcm_pitch_roll_mirror_adjuster.adjust_dcm_pitch_roll_vfm_from_lut( + composite.undulator_dcm, + composite.vfm, + composite.vfm_mirror_voltages, + energy_kev, + ) + yield from bps.wait(group=UNDULATOR_GROUP) + + +def read_energy(composite: SetEnergyComposite) -> Generator[Msg, Any, float]: + """Obtain the energy in kev""" + return (yield from bps.rd(composite.dcm.energy_in_kev)) # type: ignore + + +def set_energy_plan( + energy_kev, + composite: SetEnergyComposite, +): + yield from transmission_and_xbpm_feedback_for_collection_wrapper( + _set_energy_plan(energy_kev, composite), + composite.xbpm_feedback, + composite.attenuator, + DESIRED_TRANSMISSION_FRACTION, + ) diff --git a/src/mx_bluesky/hyperion/external_interaction/__init__.py b/src/mx_bluesky/hyperion/external_interaction/__init__.py new file mode 100644 index 000000000..e03996aa1 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/__init__.py @@ -0,0 +1,9 @@ +"""Provides external interaction functionality to Hyperion, including Nexus file +creation, ISPyB deposition, and Zocalo processing submissions. + +Functionality from this module can/should be used through the callback functions in +external_interaction.callbacks which can subscribe to the Bluesky RunEngine and handle +these various interactions based on the documents emitted by the RunEngine during the +execution of the experimental plan. It's not recommended to use the interaction classes +here directly in plans except through the use of such callbacks. 
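+
+Callbacks are subscribed to the Bluesky RunEngine (in Hyperion via the 0MQ proxy and
+RemoteDispatcher started in external_interaction.callbacks.__main__) so that they
+receive the documents emitted while a plan runs and can perform these interactions
+outside of the main experiment process.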
+""" diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/__init__.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/__init__.py new file mode 100644 index 000000000..03322e7f7 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/__init__.py @@ -0,0 +1,10 @@ +"""Callbacks which can be subscribed to by the Bluesky RunEngine in order to perform +external interactions in response to the 'documents' emitted when events occur in the +execution of an experimental plan. + +Callbacks used for the Hyperion fast grid scan are prefixed with 'FGS'. +""" + +from .__main__ import main + +__all__ = ["main"] diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/__main__.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/__main__.py new file mode 100644 index 000000000..4464fc54d --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/__main__.py @@ -0,0 +1,148 @@ +import logging +from collections.abc import Callable, Sequence +from threading import Thread +from time import sleep + +from bluesky.callbacks.zmq import Proxy, RemoteDispatcher +from dodal.log import LOGGER as dodal_logger +from dodal.log import set_up_all_logging_handlers + +from mx_bluesky.hyperion.external_interaction.callbacks.log_uid_tag_callback import ( + LogUidTaggingCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback import ( + RobotLoadISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback import ( + RotationISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback import ( + RotationNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback import ( + GridscanNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback import ( + ZocaloCallback, +) +from mx_bluesky.hyperion.log import ( + ISPYB_LOGGER, + NEXUS_LOGGER, + _get_logging_dir, + tag_filter, +) +from mx_bluesky.hyperion.parameters.cli import parse_callback_dev_mode_arg +from mx_bluesky.hyperion.parameters.constants import CONST + +LIVENESS_POLL_SECONDS = 1 +ERROR_LOG_BUFFER_LINES = 5000 + + +def setup_callbacks(): + zocalo = ZocaloCallback() + return [ + GridscanNexusFileCallback(), + GridscanISPyBCallback(emit=zocalo), + RotationNexusFileCallback(), + RotationISPyBCallback(emit=zocalo), + LogUidTaggingCallback(), + RobotLoadISPyBCallback(), + ] + + +def setup_logging(dev_mode: bool): + for logger, filename in [ + (ISPYB_LOGGER, "hyperion_ispyb_callback.log"), + (NEXUS_LOGGER, "hyperion_nexus_callback.log"), + ]: + if logger.handlers == []: + handlers = set_up_all_logging_handlers( + logger, + _get_logging_dir(), + filename, + dev_mode, + error_log_buffer_lines=ERROR_LOG_BUFFER_LINES, + graylog_port=CONST.GRAYLOG_PORT, + ) + handlers["graylog_handler"].addFilter(tag_filter) + log_info(f"Loggers initialised with dev_mode={dev_mode}") + nexgen_logger = logging.getLogger("nexgen") + nexgen_logger.parent = NEXUS_LOGGER + dodal_logger.parent = ISPYB_LOGGER + log_debug("nexgen logger added to nexus logger") + + +def setup_threads(): + proxy = Proxy(*CONST.CALLBACK_0MQ_PROXY_PORTS) + dispatcher = RemoteDispatcher(f"localhost:{CONST.CALLBACK_0MQ_PROXY_PORTS[1]}") + log_debug("Created proxy and dispatcher objects") + + def start_proxy(): + proxy.start() + + def 
start_dispatcher(callbacks: list[Callable]): + [dispatcher.subscribe(cb) for cb in callbacks] + dispatcher.start() + + return proxy, dispatcher, start_proxy, start_dispatcher + + +def log_info(msg, *args, **kwargs): + ISPYB_LOGGER.info(msg, *args, **kwargs) + NEXUS_LOGGER.info(msg, *args, **kwargs) + + +def log_debug(msg, *args, **kwargs): + ISPYB_LOGGER.debug(msg, *args, **kwargs) + NEXUS_LOGGER.debug(msg, *args, **kwargs) + + +def wait_for_threads_forever(threads: Sequence[Thread]): + alive = [t.is_alive() for t in threads] + try: + log_debug("Trying to wait forever on callback and dispatcher threads") + while all(alive): + sleep(LIVENESS_POLL_SECONDS) + alive = [t.is_alive() for t in threads] + except KeyboardInterrupt: + log_info("Main thread recieved interrupt - exiting.") + else: + log_info("Proxy or dispatcher thread ended - exiting.") + + +class HyperionCallbackRunner: + """Runs Nexus, ISPyB and Zocalo callbacks in their own process.""" + + def __init__(self, dev_mode) -> None: + setup_logging(dev_mode) + log_info("Hyperion callback process started.") + + self.callbacks = setup_callbacks() + self.proxy, self.dispatcher, start_proxy, start_dispatcher = setup_threads() + log_info("Created 0MQ proxy and local RemoteDispatcher.") + + self.proxy_thread = Thread(target=start_proxy, daemon=True) + self.dispatcher_thread = Thread( + target=start_dispatcher, args=[self.callbacks], daemon=True + ) + + def start(self): + log_info(f"Launching threads, with callbacks: {self.callbacks}") + self.proxy_thread.start() + self.dispatcher_thread.start() + log_info("Proxy and dispatcher thread launched.") + wait_for_threads_forever([self.proxy_thread, self.dispatcher_thread]) + + +def main(dev_mode=False) -> None: + dev_mode = dev_mode or parse_callback_dev_mode_arg() + print(f"In dev mode: {dev_mode}") + runner = HyperionCallbackRunner(dev_mode) + runner.start() + + +if __name__ == "__main__": + main() diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/aperture_change_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/aperture_change_callback.py new file mode 100644 index 000000000..593e13e10 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/aperture_change_callback.py @@ -0,0 +1,22 @@ +from bluesky.callbacks import CallbackBase +from event_model.documents.run_start import RunStart + +from mx_bluesky.hyperion.log import LOGGER + +from .logging_callback import format_doc_for_log + + +class ApertureChangeCallback(CallbackBase): + """A callback that's used to send the selected aperture back to GDA""" + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.last_selected_aperture: str = "NONE" + + def start(self, doc: RunStart): + if doc.get("subplan_name") == "change_aperture": + LOGGER.debug(f"START: {format_doc_for_log(doc)}") + ap_size = doc.get("aperture_size") + assert isinstance(ap_size, str) + LOGGER.info(f"Updating most recent in-plan aperture change to {ap_size}.") + self.last_selected_aperture = ap_size diff --git a/tests/i24/serial/fixed_target/__init__.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/common/__init__.py similarity index 100% rename from tests/i24/serial/fixed_target/__init__.py rename to src/mx_bluesky/hyperion/external_interaction/callbacks/common/__init__.py diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/common/callback_util.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/common/callback_util.py new file mode 100644 index 
000000000..d5be89bf5 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/common/callback_util.py @@ -0,0 +1,46 @@ +from collections.abc import Callable + +from bluesky.callbacks import CallbackBase + +from mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback import ( + RobotLoadISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback import ( + RotationISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback import ( + RotationNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback import ( + GridscanNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback import ( + ZocaloCallback, +) + +CallbacksFactory = Callable[[], tuple[CallbackBase, ...]] + + +def create_robot_load_and_centre_callbacks() -> ( + tuple[GridscanNexusFileCallback, GridscanISPyBCallback, RobotLoadISPyBCallback] +): + return ( + GridscanNexusFileCallback(), + GridscanISPyBCallback(emit=ZocaloCallback()), + RobotLoadISPyBCallback(), + ) + + +def create_gridscan_callbacks() -> ( + tuple[GridscanNexusFileCallback, GridscanISPyBCallback] +): + return (GridscanNexusFileCallback(), GridscanISPyBCallback(emit=ZocaloCallback())) + + +def create_rotation_callbacks() -> ( + tuple[RotationNexusFileCallback, RotationISPyBCallback] +): + return (RotationNexusFileCallback(), RotationISPyBCallback(emit=ZocaloCallback())) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/common/ispyb_mapping.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/common/ispyb_mapping.py new file mode 100644 index 000000000..911c99a4d --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/common/ispyb_mapping.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import re + +from dodal.devices.detector import DetectorParams + +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGroupInfo, + DataCollectionInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import IspybParams +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + EIGER_FILE_SUFFIX, + I03_EIGER_DETECTOR, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_utils import ( + VISIT_PATH_REGEX, + get_current_time_string, +) +from mx_bluesky.hyperion.parameters.components import DiffractionExperimentWithSample + + +def populate_data_collection_group(params: DiffractionExperimentWithSample): + dcg_info = DataCollectionGroupInfo( + visit_string=params.visit, + experiment_type=params.ispyb_experiment_type.value, + sample_id=params.sample_id, + ) + return dcg_info + + +def populate_remaining_data_collection_info( + comment, + data_collection_group_id, + data_collection_info: DataCollectionInfo, + params: DiffractionExperimentWithSample, +): + data_collection_info.visit_string = params.visit + data_collection_info.parent_id = data_collection_group_id + data_collection_info.sample_id = params.sample_id + data_collection_info.detector_id = I03_EIGER_DETECTOR + data_collection_info.comments = comment + data_collection_info.detector_distance = params.detector_params.detector_distance + data_collection_info.exp_time = params.detector_params.exposure_time + data_collection_info.imgdir = params.detector_params.directory + 
data_collection_info.imgprefix = params.detector_params.prefix + data_collection_info.imgsuffix = EIGER_FILE_SUFFIX + # Both overlap and n_passes included for backwards compatibility, + # planned to be removed later + data_collection_info.n_passes = 1 + data_collection_info.overlap = 0 + data_collection_info.start_image_number = 1 + beam_position = params.detector_params.get_beam_position_mm( + params.detector_params.detector_distance + ) + data_collection_info.xbeam = beam_position[0] + data_collection_info.ybeam = beam_position[1] + data_collection_info.start_time = get_current_time_string() + # temporary file template until nxs filewriting is integrated and we can use + # that file name + data_collection_info.file_template = f"{params.detector_params.prefix}_{data_collection_info.data_collection_number}_master.h5" + return data_collection_info + + +def get_proposal_and_session_from_visit_string(visit_string: str) -> tuple[str, int]: + visit_parts = visit_string.split("-") + assert len(visit_parts) == 2, f"Unexpected visit string {visit_string}" + return visit_parts[0], int(visit_parts[1]) + + +def get_visit_string_from_path(path: str | None) -> str | None: + match = re.search(VISIT_PATH_REGEX, path) if path else None + return str(match.group(1)) if match else None + + +def get_visit_string(ispyb_params: IspybParams, detector_params: DetectorParams) -> str: + assert ispyb_params and detector_params, "StoreInISPyB didn't acquire params" + visit_path_match = get_visit_string_from_path(ispyb_params.visit_path) + if visit_path_match: + return visit_path_match + visit_path_match = get_visit_string_from_path(detector_params.directory) + if not visit_path_match: + raise ValueError( + f"Visit not found from {ispyb_params.visit_path} or {detector_params.directory}" + ) + return visit_path_match diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/grid_detection_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/grid_detection_callback.py new file mode 100644 index 000000000..ff011fc9a --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/grid_detection_callback.py @@ -0,0 +1,88 @@ +from typing import TypedDict + +import numpy as np +from bluesky.callbacks import CallbackBase +from dodal.devices.oav.oav_detector import OAVConfigParams +from dodal.devices.oav.utils import calculate_x_y_z_of_pixel +from event_model.documents import Event + +from mx_bluesky.hyperion.log import LOGGER + + +class GridParamUpdate(TypedDict): + x_start_um: float + y_start_um: float + y2_start_um: float + z_start_um: float + z2_start_um: float + x_steps: int + y_steps: int + z_steps: int + x_step_size_um: float + y_step_size_um: float + z_step_size_um: float + + +class GridDetectionCallback(CallbackBase): + def __init__( + self, + oav_params: OAVConfigParams, + *args, + ) -> None: + super().__init__(*args) + self.oav_params = oav_params + self.start_positions: list = [] + self.box_numbers: list = [] + + def event(self, doc: Event): + data = doc.get("data") + top_left_x_px = data["oav_grid_snapshot_top_left_x"] + box_width_px = data["oav_grid_snapshot_box_width"] + x_of_centre_of_first_box_px = top_left_x_px + box_width_px / 2 + + top_left_y_px = data["oav_grid_snapshot_top_left_y"] + y_of_centre_of_first_box_px = top_left_y_px + box_width_px / 2 + + smargon_omega = data["smargon-omega"] + current_xyz = np.array( + [data["smargon-x"], data["smargon-y"], data["smargon-z"]] + ) + + centre_of_first_box = ( + x_of_centre_of_first_box_px, + y_of_centre_of_first_box_px, + ) + 
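+ # Convert the centre of the first grid box from OAV pixels into a motor-space (x, y, z) start position, using the current smargon position, omega and the OAV calibration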
+ position_grid_start = calculate_x_y_z_of_pixel( + current_xyz, smargon_omega, centre_of_first_box, self.oav_params + ) + + LOGGER.info(f"Calculated start position {position_grid_start}") + + self.start_positions.append(position_grid_start) + self.box_numbers.append( + ( + data["oav_grid_snapshot_num_boxes_x"], + data["oav_grid_snapshot_num_boxes_y"], + ) + ) + + self.x_step_size_mm = box_width_px * self.oav_params.micronsPerXPixel / 1000 + self.y_step_size_mm = box_width_px * self.oav_params.micronsPerYPixel / 1000 + self.z_step_size_mm = box_width_px * self.oav_params.micronsPerYPixel / 1000 + return doc + + def get_grid_parameters(self) -> GridParamUpdate: + return { + "x_start_um": self.start_positions[0][0], + "y_start_um": self.start_positions[0][1], + "y2_start_um": self.start_positions[0][1], + "z_start_um": self.start_positions[1][2], + "z2_start_um": self.start_positions[1][2], + "x_steps": self.box_numbers[0][0], + "y_steps": self.box_numbers[0][1], + "z_steps": self.box_numbers[1][1], + "x_step_size_um": self.x_step_size_mm, + "y_step_size_um": self.y_step_size_mm, + "z_step_size_um": self.z_step_size_mm, + } diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/ispyb_callback_base.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/ispyb_callback_base.py new file mode 100644 index 000000000..e35512361 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/ispyb_callback_base.py @@ -0,0 +1,205 @@ +from __future__ import annotations + +from abc import abstractmethod +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any, TypeVar, cast + +from dodal.beamline_specific_utils.i03 import beam_size_from_aperture +from dodal.devices.aperturescatterguard import SingleAperturePosition +from dodal.devices.detector.det_resolution import resolution +from dodal.devices.synchrotron import SynchrotronMode + +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionInfo, + DataCollectionPositionInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_utils import get_ispyb_config +from mx_bluesky.hyperion.log import ISPYB_LOGGER, set_dcgid_tag +from mx_bluesky.hyperion.parameters.components import DiffractionExperimentWithSample +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.utils.utils import convert_eV_to_angstrom + +from .logging_callback import format_doc_for_log + +D = TypeVar("D") +if TYPE_CHECKING: + from event_model.documents import Event, EventDescriptor, RunStart, RunStop + + +class BaseISPyBCallback(PlanReactiveCallback): + def __init__( + self, + *, + emit: Callable[..., Any] | None = None, + ) -> None: + """Subclasses should run super().__init__() with parameters, then set + self.ispyb to the type of ispyb relevant to the experiment and define the type + for self.ispyb_ids.""" + ISPYB_LOGGER.debug("Initialising ISPyB callback") + super().__init__(log=ISPYB_LOGGER, emit=emit) + self._oav_snapshot_event_idx: int = 0 + self.params: DiffractionExperimentWithSample | None = None + self.ispyb: StoreInIspyb + self.descriptors: dict[str, EventDescriptor] = {} + self.ispyb_config = get_ispyb_config() + if ( + self.ispyb_config == CONST.SIM.ISPYB_CONFIG + or self.ispyb_config == 
CONST.SIM.DEV_ISPYB_DATABASE_CFG + ): + ISPYB_LOGGER.warning( + f"{self.__class__} using dev ISPyB config: {self.ispyb_config}. If you " + "want to use the real database, please set the ISPYB_CONFIG_PATH " + "environment variable." + ) + self.uid_to_finalize_on: str | None = None + self.ispyb_ids: IspybIds = IspybIds() + self.log = ISPYB_LOGGER + + def activity_gated_start(self, doc: RunStart): + self._oav_snapshot_event_idx = 0 + return self._tag_doc(doc) + + def activity_gated_descriptor(self, doc: EventDescriptor): + self.descriptors[doc["uid"]] = doc + return self._tag_doc(doc) + + def activity_gated_event(self, doc: Event) -> Event: + """Subclasses should extend this to add a call to set_dcgid_tag from + hyperion.log""" + ISPYB_LOGGER.debug("ISPyB handler received event document.") + assert self.ispyb is not None, "ISPyB deposition wasn't initialised!" + assert self.params is not None, "ISPyB handler didn't receive parameters!" + + event_descriptor = self.descriptors.get(doc["descriptor"]) + if event_descriptor is None: + ISPYB_LOGGER.warning( + f"Ispyb handler {self} received event doc {format_doc_for_log(doc)} and " + "has no corresponding descriptor record" + ) + return doc + match event_descriptor.get("name"): + case CONST.DESCRIPTORS.HARDWARE_READ_PRE: + scan_data_infos = self._handle_ispyb_hardware_read(doc) + case CONST.DESCRIPTORS.HARDWARE_READ_DURING: + scan_data_infos = self._handle_ispyb_transmission_flux_read(doc) + case _: + return self._tag_doc(doc) + self.ispyb_ids = self.ispyb.update_deposition(self.ispyb_ids, scan_data_infos) + ISPYB_LOGGER.info(f"Received ISPYB IDs: {self.ispyb_ids}") + return self._tag_doc(doc) + + def _handle_ispyb_hardware_read(self, doc) -> Sequence[ScanDataInfo]: + assert self.params, "Event handled before activity_gated_start received params" + ISPYB_LOGGER.info("ISPyB handler received event from read hardware") + assert isinstance( + synchrotron_mode := doc["data"]["synchrotron-synchrotron_mode"], + SynchrotronMode, + ) + + hwscan_data_collection_info = DataCollectionInfo( + undulator_gap1=doc["data"]["undulator-current_gap"], + synchrotron_mode=synchrotron_mode.value, + slitgap_horizontal=doc["data"]["s4_slit_gaps_xgap"], + slitgap_vertical=doc["data"]["s4_slit_gaps_ygap"], + ) + hwscan_position_info = DataCollectionPositionInfo( + pos_x=doc["data"]["smargon-x"], + pos_y=doc["data"]["smargon-y"], + pos_z=doc["data"]["smargon-z"], + ) + scan_data_infos = self.populate_info_for_update( + hwscan_data_collection_info, hwscan_position_info, self.params + ) + ISPYB_LOGGER.info("Updating ispyb data collection after hardware read.") + return scan_data_infos + + def _handle_ispyb_transmission_flux_read(self, doc) -> Sequence[ScanDataInfo]: + assert self.params + aperture_size = SingleAperturePosition( + **doc["data"]["aperture_scatterguard-selected_aperture"] + ) + beamsize = beam_size_from_aperture(aperture_size) + beamsize_x_mm = beamsize.x_um / 1000 if beamsize.x_um else None + beamsize_y_mm = beamsize.y_um / 1000 if beamsize.y_um else None + hwscan_data_collection_info = DataCollectionInfo( + beamsize_at_samplex=beamsize_x_mm, + beamsize_at_sampley=beamsize_y_mm, + focal_spot_size_at_samplex=beamsize_x_mm, + focal_spot_size_at_sampley=beamsize_y_mm, + flux=doc["data"]["flux_flux_reading"], + ) + if transmission := doc["data"]["attenuator-actual_transmission"]: + # Ispyb wants the transmission in a percentage, we use fractions + hwscan_data_collection_info.transmission = transmission * 100 + event_energy = doc["data"]["dcm-energy_in_kev"] + if 
event_energy: + energy_ev = event_energy * 1000 + wavelength_angstroms = convert_eV_to_angstrom(energy_ev) + hwscan_data_collection_info.wavelength = wavelength_angstroms + hwscan_data_collection_info.resolution = resolution( + self.params.detector_params, + wavelength_angstroms, + self.params.detector_params.detector_distance, + ) + scan_data_infos = self.populate_info_for_update( + hwscan_data_collection_info, None, self.params + ) + ISPYB_LOGGER.info("Updating ispyb data collection after flux read.") + self.append_to_comment(f"Aperture: {aperture_size.name}. ") + return scan_data_infos + + @abstractmethod + def populate_info_for_update( + self, + event_sourced_data_collection_info: DataCollectionInfo, + event_sourced_position_info: DataCollectionPositionInfo | None, + params: DiffractionExperimentWithSample, + ) -> Sequence[ScanDataInfo]: + pass + + def activity_gated_stop(self, doc: RunStop) -> RunStop: + """Subclasses must check that they are recieving a stop document for the correct + uid to use this method!""" + assert isinstance( + self.ispyb, StoreInIspyb + ), "ISPyB handler received stop document, but deposition object doesn't exist!" + ISPYB_LOGGER.debug("ISPyB handler received stop document.") + exit_status = ( + doc.get("exit_status") or "Exit status not available in stop document!" + ) + reason = doc.get("reason") or "" + set_dcgid_tag(None) + try: + self.ispyb.end_deposition(self.ispyb_ids, exit_status, reason) + except Exception as e: + ISPYB_LOGGER.warning( + f"Failed to finalise ISPyB deposition on stop document: {format_doc_for_log(doc)} with exception: {e}" + ) + return self._tag_doc(doc) + + def _append_to_comment(self, id: int, comment: str) -> None: + assert isinstance(self.ispyb, StoreInIspyb) + try: + self.ispyb.append_to_comment(id, comment) + except TypeError: + ISPYB_LOGGER.warning( + "ISPyB deposition not initialised, can't update comment." 
+ ) + + def append_to_comment(self, comment: str): + for id in self.ispyb_ids.data_collection_ids: + self._append_to_comment(id, comment) + + def _tag_doc(self, doc: D) -> D: + assert isinstance(doc, dict) + if self.ispyb_ids: + doc["ispyb_dcids"] = self.ispyb_ids.data_collection_ids + return cast(D, doc) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/log_uid_tag_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/log_uid_tag_callback.py new file mode 100644 index 000000000..b8f02baf7 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/log_uid_tag_callback.py @@ -0,0 +1,20 @@ +from bluesky.callbacks import CallbackBase +from event_model import RunStart, RunStop + +from mx_bluesky.hyperion.log import set_uid_tag + + +class LogUidTaggingCallback(CallbackBase): + def __init__(self) -> None: + """Sets the logging filter to add the outermost run uid to graylog messages""" + self.run_uid = None + + def start(self, doc: RunStart): + if self.run_uid is None: + self.run_uid = doc.get("uid") + set_uid_tag(self.run_uid) + + def stop(self, doc: RunStop): + if doc.get("run_start") == self.run_uid: + self.run_uid = None + set_uid_tag(None) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/logging_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/logging_callback.py new file mode 100644 index 000000000..71ee07381 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/logging_callback.py @@ -0,0 +1,29 @@ +import json + +from bluesky.callbacks import CallbackBase + +from mx_bluesky.hyperion.log import LOGGER + + +class _BestEffortEncoder(json.JSONEncoder): + def default(self, o): + return repr(o) + + +def format_doc_for_log(doc): + return json.dumps(doc, indent=2, cls=_BestEffortEncoder) + + +class VerbosePlanExecutionLoggingCallback(CallbackBase): + def start(self, doc): + LOGGER.info(f"START: {format_doc_for_log(doc)}") + + def descriptor(self, doc): + LOGGER.info(f"DESCRIPTOR: {format_doc_for_log(doc)}") + + def event(self, doc): + LOGGER.info(f"EVENT: {format_doc_for_log(doc)}") + return doc + + def stop(self, doc): + LOGGER.info(f"STOP: {format_doc_for_log(doc)}") diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/plan_reactive_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/plan_reactive_callback.py new file mode 100644 index 000000000..79e0a70e1 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/plan_reactive_callback.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +from collections.abc import Callable +from logging import Logger +from typing import TYPE_CHECKING, Any + +from bluesky.callbacks import CallbackBase + +if TYPE_CHECKING: + from event_model.documents import Event, EventDescriptor, RunStart, RunStop + + +class PlanReactiveCallback(CallbackBase): + log: Logger # type: ignore # this is initialised to None and not annotated in the superclass + + def __init__( + self, + log: Logger, + *, + emit: Callable[..., Any] | None = None, + ) -> None: + """A callback base class which can be left permanently subscribed to a plan, and + will 'activate' and 'deactivate' at the start and end of a plan which provides + metadata to trigger this. + The run_decorator of the plan should include in its metadata dictionary the key + 'activate callbacks', with a list of strings of the callback class(es) to + activate or deactivate. 
On receiving a start doc which specifies this, this + class will be activated, and on receiving the stop document for the + corresponding uid it will deactivate. The ordinary 'start', 'descriptor', + 'event' and 'stop' methods will be triggered as normal, and will in turn trigger + 'activity_gated_' methods - to preserve this functionality, subclasses which + override 'start' etc. should include a call to super().start(...) etc. + The logic of how activation is triggered will change to a more readable version + in the future (https://github.com/DiamondLightSource/hyperion/issues/964).""" + + super().__init__(emit=emit) + self.emit_cb = emit # to avoid GC; base class only holds a WeakRef + self.active = False + self.activity_uid = 0 + self.log = log + + def _run_activity_gated(self, name: str, func, doc, override=False): + # Runs `func` if self.active is True or override is true. Override can be used + # to run the function even after setting self.active to False, i.e. in the last + # handler of a run. + + running_gated_function = override or self.active + if not running_gated_function: + return doc + try: + return self.emit(name, func(doc)) + except Exception as e: + self.log.exception(e) + raise + + def start(self, doc: RunStart) -> RunStart | None: + callbacks_to_activate = doc.get("activate_callbacks") + if callbacks_to_activate and not self.active: + activate = type(self).__name__ in callbacks_to_activate + self.active = activate + self.log.info( + f"{'' if activate else 'not'} activating {type(self).__name__}" + ) + self.activity_uid = doc.get("uid") + return self._run_activity_gated("start", self.activity_gated_start, doc) + + def descriptor(self, doc: EventDescriptor) -> EventDescriptor | None: + return self._run_activity_gated( + "descriptor", self.activity_gated_descriptor, doc + ) + + def event(self, doc: Event) -> Event | None: # type: ignore + return self._run_activity_gated("event", self.activity_gated_event, doc) + + def stop(self, doc: RunStop) -> RunStop | None: + do_stop = self.active + if doc.get("run_start") == self.activity_uid: + self.active = False + self.activity_uid = 0 + return ( + self._run_activity_gated( + "stop", self.activity_gated_stop, doc, override=True + ) + if do_stop + else doc + ) + + def activity_gated_start(self, doc: RunStart) -> RunStart | None: + return doc + + def activity_gated_descriptor(self, doc: EventDescriptor) -> EventDescriptor | None: + return doc + + def activity_gated_event(self, doc: Event) -> Event | None: + return doc + + def activity_gated_stop(self, doc: RunStop) -> RunStop | None: + return doc + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} with id: {hex(id(self))} - active: {self.active}>" diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/robot_load/ispyb_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/robot_load/ispyb_callback.py new file mode 100644 index 000000000..dd2e79e3b --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/robot_load/ispyb_callback.py @@ -0,0 +1,88 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from event_model.documents import EventDescriptor + +from mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping import ( + get_proposal_and_session_from_visit_string, + get_visit_string_from_path, +) +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) +from mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store 
import ( + ExpeyeInteraction, + RobotActionID, +) +from mx_bluesky.hyperion.log import ISPYB_LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST + +if TYPE_CHECKING: + from event_model.documents import Event, EventDescriptor, RunStart, RunStop + + +class RobotLoadISPyBCallback(PlanReactiveCallback): + def __init__(self) -> None: + ISPYB_LOGGER.debug("Initialising ISPyB Robot Load Callback") + super().__init__(log=ISPYB_LOGGER) + self.run_uid: str | None = None + self.descriptors: dict[str, EventDescriptor] = {} + self.action_id: RobotActionID | None = None + self.expeye = ExpeyeInteraction() + + def activity_gated_start(self, doc: RunStart): + ISPYB_LOGGER.debug("ISPyB robot load callback received start document.") + if doc.get("subplan_name") == CONST.PLAN.ROBOT_LOAD: + ISPYB_LOGGER.debug(f"ISPyB robot load callback received: {doc}") + self.run_uid = doc.get("uid") + assert isinstance(metadata := doc.get("metadata"), dict) + assert isinstance( + visit := get_visit_string_from_path(metadata["visit_path"]), str + ) + proposal, session = get_proposal_and_session_from_visit_string(visit) + self.action_id = self.expeye.start_load( + proposal, + session, + metadata["sample_id"], + metadata["sample_puck"], + metadata["sample_pin"], + ) + return super().activity_gated_start(doc) + + def activity_gated_descriptor(self, doc: EventDescriptor) -> EventDescriptor | None: + self.descriptors[doc["uid"]] = doc + return super().activity_gated_descriptor(doc) + + def activity_gated_event(self, doc: Event) -> Event | None: + event_descriptor = self.descriptors.get(doc["descriptor"]) + if ( + event_descriptor + and event_descriptor.get("name") == CONST.DESCRIPTORS.ROBOT_LOAD + ): + assert ( + self.action_id is not None + ), "ISPyB Robot load callback event called unexpectedly" + barcode = doc["data"]["robot-barcode"] + oav_snapshot = doc["data"]["oav_snapshot_last_saved_path"] + webcam_snapshot = doc["data"]["webcam-last_saved_path"] + # I03 uses webcam/oav snapshots in place of before/after snapshots + self.expeye.update_barcode_and_snapshots( + self.action_id, barcode, webcam_snapshot, oav_snapshot + ) + + return super().activity_gated_event(doc) + + def activity_gated_stop(self, doc: RunStop) -> RunStop | None: + ISPYB_LOGGER.debug("ISPyB robot load callback received stop document.") + if doc.get("run_start") == self.run_uid: + assert ( + self.action_id is not None + ), "ISPyB Robot load callback stop called unexpectedly" + exit_status = ( + doc.get("exit_status") or "Exit status not available in stop document!" 
+ ) + reason = doc.get("reason") or "OK" + self.expeye.end_load(self.action_id, exit_status, reason) + self.action_id = None + return super().activity_gated_stop(doc) diff --git a/tests/i24/serial/setup_beamline/__init__.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/__init__.py similarity index 100% rename from tests/i24/serial/setup_beamline/__init__.py rename to src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/__init__.py diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_callback.py new file mode 100644 index 000000000..03983412a --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_callback.py @@ -0,0 +1,174 @@ +from __future__ import annotations + +from collections.abc import Callable, Sequence +from typing import TYPE_CHECKING, Any, cast + +from mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping import ( + populate_data_collection_group, + populate_remaining_data_collection_info, +) +from mx_bluesky.hyperion.external_interaction.callbacks.ispyb_callback_base import ( + BaseISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_mapping import ( + populate_data_collection_info_for_rotation, +) +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionInfo, + DataCollectionPositionInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.log import ISPYB_LOGGER, set_dcgid_tag +from mx_bluesky.hyperion.parameters.components import IspybExperimentType +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import RotationScan + +if TYPE_CHECKING: + from event_model.documents import Event, RunStart, RunStop + + +class RotationISPyBCallback(BaseISPyBCallback): + """Callback class to handle the deposition of experiment parameters into the ISPyB + database. Listens for 'event' and 'descriptor' documents. Creates the ISpyB entry on + recieving an 'event' document for the 'ispyb_reading_hardware' event, and updates the + deposition on recieving its final 'stop' document. + + To use, subscribe the Bluesky RunEngine to an instance of this class. + E.g.: + ispyb_handler_callback = RotationISPyBCallback(parameters) + RE.subscribe(ispyb_handler_callback) + Or decorate a plan using bluesky.preprocessors.subs_decorator. + + See: https://blueskyproject.io/bluesky/callbacks.html#ways-to-invoke-callbacks + """ + + def __init__( + self, + *, + emit: Callable[..., Any] | None = None, + ) -> None: + super().__init__(emit=emit) + self.last_sample_id: int | None = None + self.ispyb_ids: IspybIds = IspybIds() + + def activity_gated_start(self, doc: RunStart): + if doc.get("subplan_name") == CONST.PLAN.ROTATION_OUTER: + ISPYB_LOGGER.info( + "ISPyB callback received start document with experiment parameters." 
+ ) + self.params = RotationScan.from_json(doc.get("hyperion_parameters")) + dcgid = ( + self.ispyb_ids.data_collection_group_id + if (self.params.sample_id == self.last_sample_id) + else None + ) + if ( + self.params.ispyb_experiment_type + == IspybExperimentType.CHARACTERIZATION + ): + ISPYB_LOGGER.info("Screening collection - using new DCG") + dcgid = None + self.last_sample_id = None + else: + ISPYB_LOGGER.info( + f"Collection is {self.params.ispyb_experiment_type} - storing sampleID to bundle images" + ) + self.last_sample_id = self.params.sample_id + self.ispyb = StoreInIspyb(self.ispyb_config) + ISPYB_LOGGER.info("Beginning ispyb deposition") + data_collection_group_info = populate_data_collection_group(self.params) + data_collection_info = populate_data_collection_info_for_rotation( + cast(RotationScan, self.params) + ) + data_collection_info = populate_remaining_data_collection_info( + self.params.comment, + dcgid, + data_collection_info, + self.params, + ) + data_collection_info.parent_id = dcgid + scan_data_info = ScanDataInfo( + data_collection_info=data_collection_info, + ) + self.ispyb_ids = self.ispyb.begin_deposition( + data_collection_group_info, [scan_data_info] + ) + ISPYB_LOGGER.info("ISPYB handler received start document.") + if doc.get("subplan_name") == CONST.PLAN.ROTATION_MAIN: + self.uid_to_finalize_on = doc.get("uid") + return super().activity_gated_start(doc) + + def populate_info_for_update( + self, + event_sourced_data_collection_info: DataCollectionInfo, + event_sourced_position_info: DataCollectionPositionInfo | None, + params, + ) -> Sequence[ScanDataInfo]: + assert ( + self.ispyb_ids.data_collection_ids + ), "Expect an existing DataCollection to update" + + return [ + ScanDataInfo( + data_collection_info=event_sourced_data_collection_info, + data_collection_id=self.ispyb_ids.data_collection_ids[0], + data_collection_position_info=event_sourced_position_info, + ) + ] + + def _handle_ispyb_hardware_read(self, doc: Event): + """Use the hardware read values to create the ispyb comment""" + scan_data_infos = super()._handle_ispyb_hardware_read(doc) + motor_positions_mm = [ + doc["data"]["smargon-x"], + doc["data"]["smargon-y"], + doc["data"]["smargon-z"], + ] + assert ( + self.params + ), "handle_ispyb_hardware_read triggered before activity_gated_start" + motor_positions_um = [position * 1000 for position in motor_positions_mm] + comment = f"Sample position (µm): ({motor_positions_um[0]:.0f}, {motor_positions_um[1]:.0f}, {motor_positions_um[2]:.0f}) {self.params.comment} " + scan_data_infos[0].data_collection_info.comments = comment + return scan_data_infos + + def activity_gated_event(self, doc: Event): + doc = super().activity_gated_event(doc) + set_dcgid_tag(self.ispyb_ids.data_collection_group_id) + + descriptor_name = self.descriptors[doc["descriptor"]].get("name") + if descriptor_name == CONST.DESCRIPTORS.OAV_ROTATION_SNAPSHOT_TRIGGERED: + scan_data_infos = self._handle_oav_rotation_snapshot_triggered(doc) + self.ispyb_ids = self.ispyb.update_deposition( + self.ispyb_ids, scan_data_infos + ) + + return doc + + def _handle_oav_rotation_snapshot_triggered(self, doc) -> Sequence[ScanDataInfo]: + assert self.ispyb_ids.data_collection_ids, "No current data collection" + assert self.params, "ISPyB handler didn't receive parameters!" 
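+ # Each rotation snapshot event carries the path of the last saved OAV image; record it in the next xtal_snapshot column of the most recent data collection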
+ data = doc["data"] + self._oav_snapshot_event_idx += 1 + data_collection_info = DataCollectionInfo( + **{ + f"xtal_snapshot{self._oav_snapshot_event_idx}": data.get( + "oav_snapshot_last_saved_path" + ) + } + ) + scan_data_info = ScanDataInfo( + data_collection_id=self.ispyb_ids.data_collection_ids[-1], + data_collection_info=data_collection_info, + ) + return [scan_data_info] + + def activity_gated_stop(self, doc: RunStop) -> RunStop: + if doc.get("run_start") == self.uid_to_finalize_on: + self.uid_to_finalize_on = None + return super().activity_gated_stop(doc) + return self._tag_doc(doc) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_mapping.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_mapping.py new file mode 100644 index 000000000..ee9c44a9f --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_mapping.py @@ -0,0 +1,36 @@ +from __future__ import annotations + +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import DataCollectionInfo +from mx_bluesky.hyperion.log import ISPYB_LOGGER +from mx_bluesky.hyperion.parameters.rotation import RotationScan + + +def populate_data_collection_info_for_rotation(params: RotationScan): + info = DataCollectionInfo( + omega_start=params.omega_start_deg, + data_collection_number=params.detector_params.run_number, # type:ignore # the validator always makes this int + n_images=params.num_images, + axis_range=params.rotation_increment_deg, + axis_start=params.omega_start_deg, + axis_end=(params.omega_start_deg + params.scan_width_deg), + kappa_start=params.kappa_start_deg, + ) + ( + info.xtal_snapshot1, + info.xtal_snapshot2, + info.xtal_snapshot3, + info.xtal_snapshot4, + ) = get_xtal_snapshots(params.ispyb_params) + return info + + +def get_xtal_snapshots(ispyb_params): + if ispyb_params.xtal_snapshots_omega_start: + xtal_snapshots = ispyb_params.xtal_snapshots_omega_start[:4] + ISPYB_LOGGER.info( + f"Using rotation scan snapshots {xtal_snapshots} for ISPyB deposition" + ) + else: + ISPYB_LOGGER.warning("No xtal snapshot paths sent to ISPyB!") + xtal_snapshots = [] + return xtal_snapshots + [None] * (4 - len(xtal_snapshots)) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/nexus_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/nexus_callback.py new file mode 100644 index 000000000..a79a7ad49 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/rotation/nexus_callback.py @@ -0,0 +1,102 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) +from mx_bluesky.hyperion.external_interaction.nexus.nexus_utils import ( + create_beam_and_attenuator_parameters, + vds_type_based_on_bit_depth, +) +from mx_bluesky.hyperion.external_interaction.nexus.write_nexus import NexusWriter +from mx_bluesky.hyperion.log import NEXUS_LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import RotationScan + +from ..logging_callback import format_doc_for_log + +if TYPE_CHECKING: + from event_model.documents import Event, EventDescriptor, RunStart + + +class RotationNexusFileCallback(PlanReactiveCallback): + """Callback class to handle the creation of Nexus files based on experiment + parameters for rotation scans + + To use, subscribe the Bluesky RunEngine to an instance of this class. 
+ E.g.: + nexus_file_handler_callback = NexusFileCallback(parameters) + RE.subscribe(nexus_file_handler_callback) + Or decorate a plan using bluesky.preprocessors.subs_decorator. + + See: https://blueskyproject.io/bluesky/callbacks.html#ways-to-invoke-callbacks + """ + + def __init__(self) -> None: + super().__init__(NEXUS_LOGGER) + self.run_uid: str | None = None + self.writer: NexusWriter | None = None + self.descriptors: dict[str, EventDescriptor] = {} + # used when multiple collections are made in one detector arming event: + self.full_num_of_images: int | None = None + self.meta_data_run_number: int | None = None + + def activity_gated_descriptor(self, doc: EventDescriptor): + self.descriptors[doc["uid"]] = doc + + def activity_gated_event(self, doc: Event): + event_descriptor = self.descriptors.get(doc["descriptor"]) + if event_descriptor is None: + NEXUS_LOGGER.warning( + f"Rotation Nexus handler {self} received event doc {format_doc_for_log(doc)} and " + "has no corresponding descriptor record" + ) + return doc + if event_descriptor.get("name") == CONST.DESCRIPTORS.HARDWARE_READ_DURING: + NEXUS_LOGGER.info( + f"Nexus handler received event from read hardware {format_doc_for_log(doc)}" + ) + data = doc["data"] + assert self.writer, "Nexus writer not initialised" + ( + self.writer.beam, + self.writer.attenuator, + ) = create_beam_and_attenuator_parameters( + data["dcm-energy_in_kev"], + data["flux_flux_reading"], + data["attenuator-actual_transmission"], + ) + vds_data_type = vds_type_based_on_bit_depth(doc["data"]["eiger_bit_depth"]) + self.writer.create_nexus_file(vds_data_type) + NEXUS_LOGGER.info(f"Nexus file created at {self.writer.data_filename}") + return doc + + def activity_gated_start(self, doc: RunStart): + if doc.get("subplan_name") == CONST.PLAN.ROTATION_MULTI: + self.full_num_of_images = doc.get("full_num_of_images") + self.meta_data_run_number = doc.get("meta_data_run_number") + if doc.get("subplan_name") == CONST.PLAN.ROTATION_OUTER: + self.run_uid = doc.get("uid") + json_params = doc.get("hyperion_parameters") + NEXUS_LOGGER.info( + f"Nexus writer received start document with experiment parameters {json_params}" + ) + parameters = RotationScan.from_json(json_params) + NEXUS_LOGGER.info("Setting up nexus file...") + det_size = ( + parameters.detector_params.detector_size_constants.det_size_pixels + ) + shape = (parameters.num_images, det_size.width, det_size.height) + self.writer = NexusWriter( + parameters, + shape, + parameters.scan_points, + omega_start_deg=parameters.omega_start_deg, + chi_start_deg=parameters.chi_start_deg or 0, + phi_start_deg=parameters.phi_start_deg or 0, + vds_start_index=parameters.nexus_vds_start_img, + full_num_of_images=self.full_num_of_images, + meta_data_run_number=self.meta_data_run_number, + rotation_direction=parameters.rotation_direction, + ) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/__init__.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/ispyb_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/ispyb_callback.py new file mode 100644 index 000000000..0045d84aa --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/ispyb_callback.py @@ -0,0 +1,271 @@ +from __future__ import annotations + +from collections.abc import Callable, Sequence +from time import time +from typing 
import TYPE_CHECKING, Any + +import numpy as np +from blueapi.core import MsgGenerator +from bluesky import preprocessors as bpp +from dodal.devices.zocalo.zocalo_results import ZOCALO_READING_PLAN_NAME + +from mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping import ( + populate_data_collection_group, + populate_remaining_data_collection_info, +) +from mx_bluesky.hyperion.external_interaction.callbacks.ispyb_callback_base import ( + BaseISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.logging_callback import ( + format_doc_for_log, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_mapping import ( + construct_comment_for_gridscan, + populate_xy_data_collection_info, + populate_xz_data_collection_info, +) +from mx_bluesky.hyperion.external_interaction.exceptions import ISPyBDepositionNotMade +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGridInfo, + DataCollectionInfo, + DataCollectionPositionInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import Orientation +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.log import ISPYB_LOGGER, set_dcgid_tag +from mx_bluesky.hyperion.parameters.components import DiffractionExperimentWithSample +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ( + GridCommon, +) + +if TYPE_CHECKING: + from event_model import Event, RunStart, RunStop + + +def ispyb_activation_wrapper(plan_generator: MsgGenerator, parameters): + return bpp.run_wrapper( + plan_generator, + md={ + "activate_callbacks": ["GridscanISPyBCallback"], + "subplan_name": CONST.PLAN.GRID_DETECT_AND_DO_GRIDSCAN, + "hyperion_parameters": parameters.json(), + }, + ) + + +class GridscanISPyBCallback(BaseISPyBCallback): + """Callback class to handle the deposition of experiment parameters into the ISPyB + database. Listens for 'event' and 'descriptor' documents. Creates the ISpyB entry on + recieving an 'event' document for the 'ispyb_reading_hardware' event, and updates the + deposition on recieving its final 'stop' document. + + To use, subscribe the Bluesky RunEngine to an instance of this class. + E.g.: + ispyb_handler_callback = FGSISPyBCallback(parameters) + RE.subscribe(ispyb_handler_callback) + Or decorate a plan using bluesky.preprocessors.subs_decorator. 
+ + See: https://blueskyproject.io/bluesky/callbacks.html#ways-to-invoke-callbacks + """ + + def __init__( + self, + *, + emit: Callable[..., Any] | None = None, + ) -> None: + super().__init__(emit=emit) + self.ispyb: StoreInIspyb + self.ispyb_ids: IspybIds = IspybIds() + self._start_of_fgs_uid: str | None = None + self._processing_start_time: float | None = None + + def activity_gated_start(self, doc: RunStart): + if doc.get("subplan_name") == CONST.PLAN.DO_FGS: + self._start_of_fgs_uid = doc.get("uid") + if doc.get("subplan_name") == CONST.PLAN.GRID_DETECT_AND_DO_GRIDSCAN: + self.uid_to_finalize_on = doc.get("uid") + ISPYB_LOGGER.info( + "ISPyB callback received start document with experiment parameters and " + f"uid: {self.uid_to_finalize_on}" + ) + self.params = GridCommon.from_json( + doc.get("hyperion_parameters"), allow_extras=True + ) + self.ispyb = StoreInIspyb(self.ispyb_config) + data_collection_group_info = populate_data_collection_group(self.params) + + scan_data_infos = [ + ScanDataInfo( + data_collection_info=populate_remaining_data_collection_info( + None, + None, + populate_xy_data_collection_info( + self.params.detector_params, + ), + self.params, + ), + ), + ScanDataInfo( + data_collection_info=populate_remaining_data_collection_info( + None, + None, + populate_xz_data_collection_info(self.params.detector_params), + self.params, + ) + ), + ] + + self.ispyb_ids = self.ispyb.begin_deposition( + data_collection_group_info, scan_data_infos + ) + set_dcgid_tag(self.ispyb_ids.data_collection_group_id) + return super().activity_gated_start(doc) + + def activity_gated_event(self, doc: Event): + doc = super().activity_gated_event(doc) + + descriptor_name = self.descriptors[doc["descriptor"]].get("name") + if descriptor_name == ZOCALO_READING_PLAN_NAME: + self._handle_zocalo_read_event(doc) + elif descriptor_name == CONST.DESCRIPTORS.OAV_GRID_SNAPSHOT_TRIGGERED: + scan_data_infos = self._handle_oav_grid_snapshot_triggered(doc) + self.ispyb_ids = self.ispyb.update_deposition( + self.ispyb_ids, scan_data_infos + ) + + return doc + + def _handle_zocalo_read_event(self, doc): + crystal_summary = "" + if self._processing_start_time is not None: + proc_time = time() - self._processing_start_time + crystal_summary = f"Zocalo processing took {proc_time:.2f} s. " + bboxes: list[np.ndarray] = [] + ISPYB_LOGGER.info( + f"Amending comment based on Zocalo reading doc: {format_doc_for_log(doc)}" + ) + raw_results = doc["data"]["zocalo-results"] + if len(raw_results) > 0: + for n, res in enumerate(raw_results): + bb = res["bounding_box"] + diff = np.array(bb[1]) - np.array(bb[0]) + bboxes.append(diff) + + nicely_formatted_com = [ + f"{np.round(com, 2)}" for com in res["centre_of_mass"] + ] + crystal_summary += ( + f"Crystal {n + 1}: " + f"Strength {res['total_count']}; " + f"Position (grid boxes) {nicely_formatted_com}; " + f"Size (grid boxes) {bboxes[n]}; " + ) + else: + crystal_summary += "Zocalo found no crystals in this gridscan." + assert ( + self.ispyb_ids.data_collection_ids + ), "No data collection to add results to" + self.ispyb.append_to_comment( + self.ispyb_ids.data_collection_ids[0], crystal_summary + ) + + def _handle_oav_grid_snapshot_triggered(self, doc) -> Sequence[ScanDataInfo]: + assert self.ispyb_ids.data_collection_ids, "No current data collection" + assert self.params, "ISPyB handler didn't receive parameters!" 
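+ # Each grid snapshot event corresponds to one gridscan data collection (indexed by _oav_snapshot_event_idx); it supplies the snapshot paths, grid dimensions and the comment stored against that collection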
+ data = doc["data"] + data_collection_id = None + data_collection_info = DataCollectionInfo( + xtal_snapshot1=data.get("oav_grid_snapshot_last_path_full_overlay"), + xtal_snapshot2=data.get("oav_grid_snapshot_last_path_outer"), + xtal_snapshot3=data.get("oav_grid_snapshot_last_saved_path"), + n_images=( + data["oav_grid_snapshot_num_boxes_x"] + * data["oav_grid_snapshot_num_boxes_y"] + ), + ) + microns_per_pixel_x = data["oav_grid_snapshot_microns_per_pixel_x"] + microns_per_pixel_y = data["oav_grid_snapshot_microns_per_pixel_y"] + data_collection_grid_info = DataCollectionGridInfo( + dx_in_mm=data["oav_grid_snapshot_box_width"] * microns_per_pixel_x / 1000, + dy_in_mm=data["oav_grid_snapshot_box_width"] * microns_per_pixel_y / 1000, + steps_x=data["oav_grid_snapshot_num_boxes_x"], + steps_y=data["oav_grid_snapshot_num_boxes_y"], + microns_per_pixel_x=microns_per_pixel_x, + microns_per_pixel_y=microns_per_pixel_y, + snapshot_offset_x_pixel=int(data["oav_grid_snapshot_top_left_x"]), + snapshot_offset_y_pixel=int(data["oav_grid_snapshot_top_left_y"]), + orientation=Orientation.HORIZONTAL, + snaked=True, + ) + data_collection_info.comments = construct_comment_for_gridscan( + data_collection_grid_info + ) + if len(self.ispyb_ids.data_collection_ids) > self._oav_snapshot_event_idx: + data_collection_id = self.ispyb_ids.data_collection_ids[ + self._oav_snapshot_event_idx + ] + self._populate_axis_info(data_collection_info, doc["data"]["smargon-omega"]) + + scan_data_info = ScanDataInfo( + data_collection_info=data_collection_info, + data_collection_id=data_collection_id, + data_collection_grid_info=data_collection_grid_info, + ) + ISPYB_LOGGER.info("Updating ispyb data collection after oav snapshot.") + self._oav_snapshot_event_idx += 1 + return [scan_data_info] + + def _populate_axis_info( + self, data_collection_info: DataCollectionInfo, omega_start: float | None + ): + if omega_start is not None: + omega_in_gda_space = -omega_start + data_collection_info.omega_start = omega_in_gda_space + data_collection_info.axis_start = omega_in_gda_space + data_collection_info.axis_end = omega_in_gda_space + data_collection_info.axis_range = 0 + + def populate_info_for_update( + self, + event_sourced_data_collection_info: DataCollectionInfo, + event_sourced_position_info: DataCollectionPositionInfo | None, + params: DiffractionExperimentWithSample, + ) -> Sequence[ScanDataInfo]: + assert ( + self.ispyb_ids.data_collection_ids + ), "Expect at least one valid data collection to record scan data" + xy_scan_data_info = ScanDataInfo( + data_collection_info=event_sourced_data_collection_info, + data_collection_id=self.ispyb_ids.data_collection_ids[0], + ) + scan_data_infos = [xy_scan_data_info] + + data_collection_id = ( + self.ispyb_ids.data_collection_ids[1] + if len(self.ispyb_ids.data_collection_ids) > 1 + else None + ) + xz_scan_data_info = ScanDataInfo( + data_collection_info=event_sourced_data_collection_info, + data_collection_id=data_collection_id, + ) + scan_data_infos.append(xz_scan_data_info) + return scan_data_infos + + def activity_gated_stop(self, doc: RunStop) -> RunStop: + if doc.get("run_start") == self._start_of_fgs_uid: + self._processing_start_time = time() + if doc.get("run_start") == self.uid_to_finalize_on: + ISPYB_LOGGER.info( + "ISPyB callback received stop document corresponding to start document " + f"with uid: {self.uid_to_finalize_on}." 
+ ) + if self.ispyb_ids == IspybIds(): + raise ISPyBDepositionNotMade("ispyb was not initialised at run start") + return super().activity_gated_stop(doc) + return self._tag_doc(doc) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/ispyb_mapping.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/ispyb_mapping.py new file mode 100644 index 000000000..ff166fc50 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/ispyb_mapping.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +import numpy +from dodal.devices.detector import DetectorParams +from dodal.devices.oav import utils as oav_utils + +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGridInfo, + DataCollectionInfo, +) + + +def populate_xz_data_collection_info(detector_params: DetectorParams): + assert ( + detector_params.omega_start is not None + and detector_params.run_number is not None + ), "StoreGridscanInIspyb failed to get parameters" + run_number = detector_params.run_number + 1 + info = DataCollectionInfo( + data_collection_number=run_number, + ) + return info + + +def populate_xy_data_collection_info(detector_params: DetectorParams): + return DataCollectionInfo( + data_collection_number=detector_params.run_number, + ) + + +def construct_comment_for_gridscan(grid_info: DataCollectionGridInfo) -> str: + assert grid_info is not None, "StoreGridScanInIspyb failed to get parameters" + + bottom_right = oav_utils.bottom_right_from_top_left( + numpy.array( + [grid_info.snapshot_offset_x_pixel, grid_info.snapshot_offset_y_pixel] + ), # type: ignore + grid_info.steps_x, + grid_info.steps_y, + grid_info.dx_in_mm, + grid_info.dy_in_mm, + grid_info.microns_per_pixel_x, + grid_info.microns_per_pixel_y, + ) + return ( + "Hyperion: Xray centring - Diffraction grid scan of " + f"{grid_info.steps_x} by " + f"{grid_info.steps_y} images in " + f"{(grid_info.dx_in_mm * 1e3):.1f} um by " + f"{(grid_info.dy_in_mm * 1e3):.1f} um steps. " + f"Top left (px): [{int(grid_info.snapshot_offset_x_pixel)},{int(grid_info.snapshot_offset_y_pixel)}], " + f"bottom right (px): [{bottom_right[0]},{bottom_right[1]}]." + ) diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/nexus_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/nexus_callback.py new file mode 100644 index 000000000..f7775c43a --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/xray_centre/nexus_callback.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) +from mx_bluesky.hyperion.external_interaction.nexus.nexus_utils import ( + create_beam_and_attenuator_parameters, + vds_type_based_on_bit_depth, +) +from mx_bluesky.hyperion.external_interaction.nexus.write_nexus import NexusWriter +from mx_bluesky.hyperion.log import NEXUS_LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan + +if TYPE_CHECKING: + from event_model.documents import Event, EventDescriptor, RunStart + + +class GridscanNexusFileCallback(PlanReactiveCallback): + """Callback class to handle the creation of Nexus files based on experiment \ + parameters. 
Initialises on receiving a 'start' document for the \
+    'run_gridscan_move_and_tidy' sub plan, which must also contain the experiment \
+    parameters as metadata under the 'hyperion_parameters' key. The nexus files \
+    themselves are written, and their timestamps updated, on receiving the event \
+    document for the during-collection hardware read, and the files are finalised \
+    on the 'stop' document for the whole run.
+
+    To use, subscribe the Bluesky RunEngine to an instance of this class.
+    E.g.:
+        nexus_file_handler_callback = GridscanNexusFileCallback()
+        RE.subscribe(nexus_file_handler_callback)
+    Or decorate a plan using bluesky.preprocessors.subs_decorator.
+
+    See: https://blueskyproject.io/bluesky/callbacks.html#ways-to-invoke-callbacks
+    """
+
+    def __init__(self) -> None:
+        super().__init__(NEXUS_LOGGER)
+        self.run_start_uid: str | None = None
+        self.nexus_writer_1: NexusWriter | None = None
+        self.nexus_writer_2: NexusWriter | None = None
+        self.descriptors: dict[str, EventDescriptor] = {}
+        self.log = NEXUS_LOGGER
+
+    def activity_gated_start(self, doc: RunStart):
+        if doc.get("subplan_name") == CONST.PLAN.GRIDSCAN_OUTER:
+            json_params = doc.get("hyperion_parameters")
+            NEXUS_LOGGER.info(
+                f"Nexus writer received start document with experiment parameters {json_params}"
+            )
+            parameters = ThreeDGridScan.from_json(json_params)
+            d_size = parameters.detector_params.detector_size_constants.det_size_pixels
+            grid_n_img_1 = parameters.scan_indices[1]
+            grid_n_img_2 = parameters.num_images - grid_n_img_1
+            data_shape_1 = (grid_n_img_1, d_size.width, d_size.height)
+            data_shape_2 = (grid_n_img_2, d_size.width, d_size.height)
+            run_number_2 = parameters.detector_params.run_number + 1
+            self.nexus_writer_1 = NexusWriter(
+                parameters, data_shape_1, parameters.scan_points_first_grid
+            )
+            self.nexus_writer_2 = NexusWriter(
+                parameters,
+                data_shape_2,
+                parameters.scan_points_second_grid,
+                run_number=run_number_2,
+                vds_start_index=parameters.scan_indices[1],
+                omega_start_deg=90,
+            )
+            self.run_start_uid = doc.get("uid")
+
+    def activity_gated_descriptor(self, doc: EventDescriptor):
+        self.descriptors[doc["uid"]] = doc
+
+    def activity_gated_event(self, doc: Event) -> Event | None:
+        assert (event_descriptor := self.descriptors.get(doc["descriptor"])) is not None
+        if event_descriptor.get("name") == CONST.DESCRIPTORS.HARDWARE_READ_DURING:
+            data = doc["data"]
+            for nexus_writer in [self.nexus_writer_1, self.nexus_writer_2]:
+                assert nexus_writer, "Nexus callback did not receive start doc"
+                (
+                    nexus_writer.beam,
+                    nexus_writer.attenuator,
+                ) = create_beam_and_attenuator_parameters(
+                    data["dcm-energy_in_kev"],
+                    data["flux_flux_reading"],
+                    data["attenuator-actual_transmission"],
+                )
+                vds_data_type = vds_type_based_on_bit_depth(
+                    doc["data"]["eiger_bit_depth"]
+                )
+                nexus_writer.create_nexus_file(vds_data_type)
+                NEXUS_LOGGER.info(f"Nexus file created at {nexus_writer.data_filename}")
+
+        return super().activity_gated_event(doc)
diff --git a/src/mx_bluesky/hyperion/external_interaction/callbacks/zocalo_callback.py b/src/mx_bluesky/hyperion/external_interaction/callbacks/zocalo_callback.py
new file mode 100644
index 000000000..0c034bd14
--- /dev/null
+++ b/src/mx_bluesky/hyperion/external_interaction/callbacks/zocalo_callback.py
@@ -0,0 +1,92 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from bluesky.callbacks import CallbackBase
+from dodal.devices.zocalo import ZocaloStartInfo, ZocaloTrigger
+
+from mx_bluesky.hyperion.external_interaction.exceptions import
ISPyBDepositionNotMade +from mx_bluesky.hyperion.log import ISPYB_LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.utils.utils import number_of_frames_from_scan_spec + +if TYPE_CHECKING: + from event_model.documents import Event, EventDescriptor, RunStart, RunStop + + +class ZocaloCallback(CallbackBase): + """Callback class to handle the triggering of Zocalo processing. + Sends zocalo a run_start signal on receiving a start document for the specified + sub-plan, and sends a run_end signal on receiving a stop document for the same plan. + + The metadata of the sub-plan this starts on must include a zocalo_environment. + + Shouldn't be subscribed directly to the RunEngine, instead should be passed to the + `emit` argument of an ISPyB callback which appends DCIDs to the relevant start doc. + """ + + def _reset_state(self): + self.run_uid: str | None = None + self.triggering_plan: str | None = None + self.zocalo_interactor: ZocaloTrigger | None = None + self.zocalo_info: list[ZocaloStartInfo] = [] + self.descriptors: dict[str, EventDescriptor] = {} + + def __init__( + self, + ): + super().__init__() + self._reset_state() + + def start(self, doc: RunStart): + ISPYB_LOGGER.info("Zocalo handler received start document.") + if triggering_plan := doc.get(CONST.TRIGGER.ZOCALO): + self.triggering_plan = triggering_plan + assert isinstance(zocalo_environment := doc.get("zocalo_environment"), str) + ISPYB_LOGGER.info(f"Zocalo environment set to {zocalo_environment}.") + self.zocalo_interactor = ZocaloTrigger(zocalo_environment) + + if self.triggering_plan and doc.get("subplan_name") == self.triggering_plan: + self.run_uid = doc.get("uid") + assert isinstance(scan_points := doc.get("scan_points"), list) + if ( + isinstance(ispyb_ids := doc.get("ispyb_dcids"), tuple) + and len(ispyb_ids) > 0 + ): + ids_and_shape = list(zip(ispyb_ids, scan_points, strict=False)) + start_frame = 0 + self.zocalo_info = [] + for idx, id_and_shape in enumerate(ids_and_shape): + id, shape = id_and_shape + num_frames = number_of_frames_from_scan_spec(shape) + self.zocalo_info.append( + ZocaloStartInfo(id, None, start_frame, num_frames, idx) + ) + start_frame += num_frames + else: + raise ISPyBDepositionNotMade( + f"No ISPyB IDs received by the start of {self.triggering_plan=}" + ) + + def descriptor(self, doc: EventDescriptor): + self.descriptors[doc["uid"]] = doc + + def event(self, doc: Event) -> Event: + event_descriptor = self.descriptors[doc["descriptor"]] + if event_descriptor.get("name") == CONST.DESCRIPTORS.ZOCALO_HW_READ: + filename = doc["data"]["eiger_odin_file_writer_id"] + for start_info in self.zocalo_info: + start_info.filename = filename + assert self.zocalo_interactor is not None + self.zocalo_interactor.run_start(start_info) + return doc + + def stop(self, doc: RunStop): + if doc.get("run_start") == self.run_uid: + ISPYB_LOGGER.info( + f"Zocalo handler received stop document, for run {doc.get('run_start')}." 
+ ) + assert self.zocalo_interactor is not None + for info in self.zocalo_info: + self.zocalo_interactor.run_end(info.ispyb_dcid) + self._reset_state() diff --git a/src/mx_bluesky/hyperion/external_interaction/config_server.py b/src/mx_bluesky/hyperion/external_interaction/config_server.py new file mode 100644 index 000000000..cfd89e161 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/config_server.py @@ -0,0 +1,35 @@ +from daq_config_server.client import ConfigServer +from pydantic import BaseModel + +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST + +_CONFIG_SERVER: ConfigServer | None = None + + +def config_server() -> ConfigServer: + global _CONFIG_SERVER + if _CONFIG_SERVER is None: + _CONFIG_SERVER = ConfigServer(CONST.CONFIG_SERVER_URL, LOGGER) + return _CONFIG_SERVER + + +class FeatureFlags(BaseModel): + # The default value will be used as the fallback when doing a best-effort fetch + # from the service + use_panda_for_gridscan: bool = False + use_gpu_for_gridscan: bool = False + set_stub_offsets: bool = False + + @classmethod + def _get_flags(cls): + flags = config_server().best_effort_get_all_feature_flags() + return {f: flags[f] for f in flags if f in cls.__fields__.keys()} + + @classmethod + def best_effort(cls): + return cls(**cls._get_flags()) + + def update_self_from_server(self): + for flag, value in self._get_flags().items(): + setattr(self, flag, value) diff --git a/src/mx_bluesky/hyperion/external_interaction/exceptions.py b/src/mx_bluesky/hyperion/external_interaction/exceptions.py new file mode 100644 index 000000000..a9a35d1b0 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/exceptions.py @@ -0,0 +1,13 @@ +from mx_bluesky.hyperion.exceptions import WarningException + + +class ISPyBDepositionNotMade(Exception): + """Raised when the ISPyB or Zocalo callbacks can't access ISPyB deposition numbers.""" + + pass + + +class NoCentreFoundException(WarningException): + """Error for if zocalo is unable to find the centre during a gridscan.""" + + pass diff --git a/src/mx_bluesky/hyperion/external_interaction/ispyb/__init__.py b/src/mx_bluesky/hyperion/external_interaction/ispyb/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/mx_bluesky/hyperion/external_interaction/ispyb/data_model.py b/src/mx_bluesky/hyperion/external_interaction/ispyb/data_model.py new file mode 100644 index 000000000..89d54c8ed --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/ispyb/data_model.py @@ -0,0 +1,91 @@ +from dataclasses import asdict, dataclass + +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import Orientation + + +@dataclass() +class DataCollectionGroupInfo: + visit_string: str + experiment_type: str + sample_id: int | None + sample_barcode: str | None = None + + +@dataclass(kw_only=True) +class DataCollectionInfo: + omega_start: float | None = None + data_collection_number: int | None = None + xtal_snapshot1: str | None = None + xtal_snapshot2: str | None = None + xtal_snapshot3: str | None = None + xtal_snapshot4: str | None = None + + n_images: int | None = None + axis_range: float | None = None + axis_end: float | None = None + kappa_start: float | None = None + + parent_id: int | None = None + visit_string: str | None = None + sample_id: int | None = None + detector_id: int | None = None + axis_start: float | None = None + focal_spot_size_at_samplex: float | None = None + focal_spot_size_at_sampley: float | None = None + slitgap_vertical: 
float | None = None + slitgap_horizontal: float | None = None + beamsize_at_samplex: float | None = None + beamsize_at_sampley: float | None = None + transmission: float | None = None + comments: str | None = None + detector_distance: float | None = None + exp_time: float | None = None + imgdir: str | None = None + file_template: str | None = None + imgprefix: str | None = None + imgsuffix: str | None = None + n_passes: int | None = None + overlap: int | None = None + flux: float | None = None + start_image_number: int | None = None + resolution: float | None = None + wavelength: float | None = None + xbeam: float | None = None + ybeam: float | None = None + synchrotron_mode: str | None = None + undulator_gap1: float | None = None + start_time: str | None = None + + +@dataclass +class DataCollectionPositionInfo: + pos_x: float + pos_y: float + pos_z: float + + +@dataclass +class DataCollectionGridInfo: + dx_in_mm: float + dy_in_mm: float + steps_x: int + steps_y: int + microns_per_pixel_x: float + microns_per_pixel_y: float + snapshot_offset_x_pixel: int + snapshot_offset_y_pixel: int + orientation: Orientation + snaked: bool + + def as_dict(self): + d = asdict(self) + d["orientation"] = self.orientation.value + return d + + +@dataclass(kw_only=True) +class ScanDataInfo: + data_collection_info: DataCollectionInfo + data_collection_id: int | None = None + data_collection_position_info: DataCollectionPositionInfo | None = None + data_collection_grid_info: DataCollectionGridInfo | None = None diff --git a/src/mx_bluesky/hyperion/external_interaction/ispyb/exp_eye_store.py b/src/mx_bluesky/hyperion/external_interaction/ispyb/exp_eye_store.py new file mode 100644 index 000000000..51acdd0db --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/ispyb/exp_eye_store.py @@ -0,0 +1,125 @@ +import configparser + +from requests import patch, post +from requests.auth import AuthBase + +from mx_bluesky.hyperion.external_interaction.exceptions import ISPyBDepositionNotMade +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_utils import ( + get_current_time_string, + get_ispyb_config, +) + +RobotActionID = int + + +class BearerAuth(AuthBase): + def __init__(self, token): + self.token = token + + def __call__(self, r): + r.headers["authorization"] = "Bearer " + self.token + return r + + +def _get_base_url_and_token() -> tuple[str, str]: + config = configparser.ConfigParser() + conf = get_ispyb_config() + config.read(conf) + expeye_config = config["expeye"] + return expeye_config["url"], expeye_config["token"] + + +class ExpeyeInteraction: + CREATE_ROBOT_ACTION = "/proposals/{proposal}/sessions/{visit_number}/robot-actions" + UPDATE_ROBOT_ACTION = "/robot-actions/{action_id}" + + def __init__(self) -> None: + url, token = _get_base_url_and_token() + self.base_url = url + "/core" + self.auth = BearerAuth(token) + + def _send_and_get_response(self, url, data, send_func) -> dict: + response = send_func(url, auth=self.auth, json=data) + if not response.ok: + raise ISPyBDepositionNotMade(f"Could not write {data} to {url}: {response}") + return response.json() + + def start_load( + self, + proposal_reference: str, + visit_number: int, + sample_id: int, + dewar_location: int, + container_location: int, + ) -> RobotActionID: + """Create a robot load entry in ispyb. + + Args: + proposal_reference (str): The proposal of the experiment e.g. cm37235 + visit_number (int): The visit number for the proposal, usually this can be + found added to the end of the proposal e.g. 
the data for + visit number 2 of proposal cm37235 is in cm37235-2 + sample_id (int): The id of the sample in the database + dewar_location (int): Which puck in the dewar the sample is in + container_location (int): Which pin in that puck has the sample + + Returns: + RobotActionID: The id of the robot load action that is created + """ + url = self.base_url + self.CREATE_ROBOT_ACTION.format( + proposal=proposal_reference, visit_number=visit_number + ) + + data = { + "startTimestamp": get_current_time_string(), + "sampleId": sample_id, + "actionType": "LOAD", + "containerLocation": container_location, + "dewarLocation": dewar_location, + } + response = self._send_and_get_response(url, data, post) + return response["robotActionId"] + + def update_barcode_and_snapshots( + self, + action_id: RobotActionID, + barcode: str, + snapshot_before_path: str, + snapshot_after_path: str, + ): + """Update the barcode and snapshots of an existing robot action. + + Args: + action_id (RobotActionID): The id of the action to update + barcode (str): The barcode to give the action + snapshot_before_path (str): Path to the snapshot before robot load + snapshot_after_path (str): Path to the snapshot after robot load + """ + url = self.base_url + self.UPDATE_ROBOT_ACTION.format(action_id=action_id) + + data = { + "sampleBarcode": barcode, + "xtalSnapshotBefore": snapshot_before_path, + "xtalSnapshotAfter": snapshot_after_path, + } + self._send_and_get_response(url, data, patch) + + def end_load(self, action_id: RobotActionID, status: str, reason: str): + """Finish an existing robot action, providing final information about how it went + + Args: + action_id (RobotActionID): The action to finish. + status (str): The status of the action at the end, "success" for success, + otherwise error + reason (str): If the status is in error than the reason for that error + """ + url = self.base_url + self.UPDATE_ROBOT_ACTION.format(action_id=action_id) + + run_status = "SUCCESS" if status == "success" else "ERROR" + + data = { + "endTimestamp": get_current_time_string(), + "status": run_status, + "message": reason, + } + self._send_and_get_response(url, data, patch) diff --git a/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_dataclass.py b/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_dataclass.py new file mode 100644 index 000000000..39b6146d4 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_dataclass.py @@ -0,0 +1,65 @@ +import builtins +from enum import Enum +from typing import Any + +import numpy as np +from pydantic import BaseModel, validator + +GRIDSCAN_ISPYB_PARAM_DEFAULTS = { + "sample_id": None, + "visit_path": "", + "position": None, + "comment": "Descriptive comment.", +} + + +class IspybParams(BaseModel): + visit_path: str + position: np.ndarray | None + comment: str + sample_id: int | None = None + + # Optional from GDA as populated by Ophyd + xtal_snapshots_omega_start: list[str] | None = None + ispyb_experiment_type: str | None = None + + class Config: + arbitrary_types_allowed = True + json_encoders = {np.ndarray: lambda a: a.tolist()} + + def dict(self, **kwargs): + as_dict = super().dict(**kwargs) + as_dict["position"] = ( + as_dict["position"].tolist() if as_dict["position"] is not None else None + ) + return as_dict + + @validator("position", pre=True) + def _parse_position( + cls, + position: list[int | float] | np.ndarray | None, + values: builtins.dict[str, Any], + ) -> np.ndarray | None: + if position is None: + return None + assert len(position) == 3 + 
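        # Editor's note: a minimal usage sketch (invented values, not part of the
        # original patch). The validator accepts either a plain list or an existing
        # numpy array, so both of the following give an equivalent ndarray position:
        #   IspybParams(visit_path="/a/visit", comment="", position=[1.0, 2.0, 3.0])
        #   IspybParams(visit_path="/a/visit", comment="", position=np.array([1.0, 2.0, 3.0]))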
if isinstance(position, np.ndarray): + return position + return np.array(position) + + +class RobotLoadIspybParams(IspybParams): ... + + +class RotationIspybParams(IspybParams): ... + + +class GridscanIspybParams(IspybParams): + def dict(self, **kwargs): + as_dict = super().dict(**kwargs) + return as_dict + + +class Orientation(Enum): + HORIZONTAL = "horizontal" + VERTICAL = "vertical" diff --git a/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_store.py b/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_store.py new file mode 100755 index 000000000..e35e6cdd9 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_store.py @@ -0,0 +1,276 @@ +from __future__ import annotations + +from collections.abc import Sequence +from dataclasses import asdict +from typing import TYPE_CHECKING + +import ispyb +import ispyb.sqlalchemy +from ispyb.connector.mysqlsp.main import ISPyBMySQLSPConnector as Connector +from ispyb.sp.mxacquisition import MXAcquisition +from ispyb.strictordereddict import StrictOrderedDict +from pydantic import BaseModel + +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGridInfo, + DataCollectionGroupInfo, + DataCollectionInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_utils import ( + get_current_time_string, + get_session_id_from_visit, +) +from mx_bluesky.hyperion.log import ISPYB_LOGGER +from mx_bluesky.hyperion.tracing import TRACER + +if TYPE_CHECKING: + pass + +I03_EIGER_DETECTOR = 78 +EIGER_FILE_SUFFIX = "h5" + + +class IspybIds(BaseModel): + data_collection_ids: tuple[int, ...] = () + data_collection_group_id: int | None = None + grid_ids: tuple[int, ...] = () + + +class StoreInIspyb: + def __init__(self, ispyb_config: str) -> None: + self.ISPYB_CONFIG_PATH: str = ispyb_config + self._data_collection_group_id: int | None + + def begin_deposition( + self, + data_collection_group_info: DataCollectionGroupInfo, + scan_data_infos: Sequence[ScanDataInfo], + ) -> IspybIds: + ispyb_ids = IspybIds() + if scan_data_infos[0].data_collection_info: + ispyb_ids.data_collection_group_id = scan_data_infos[ + 0 + ].data_collection_info.parent_id + + return self._begin_or_update_deposition( + ispyb_ids, data_collection_group_info, scan_data_infos + ) + + def update_deposition( + self, + ispyb_ids, + scan_data_infos: Sequence[ScanDataInfo], + ) -> IspybIds: + assert ( + ispyb_ids.data_collection_group_id + ), "Attempted to store scan data without a collection group" + assert ( + ispyb_ids.data_collection_ids + ), "Attempted to store scan data without a collection" + return self._begin_or_update_deposition(ispyb_ids, None, scan_data_infos) + + def _begin_or_update_deposition( + self, + ispyb_ids, + data_collection_group_info: DataCollectionGroupInfo | None, + scan_data_infos, + ) -> IspybIds: + with ispyb.open(self.ISPYB_CONFIG_PATH) as conn: + assert conn is not None, "Failed to connect to ISPyB" + if data_collection_group_info: + ispyb_ids.data_collection_group_id = ( + self._store_data_collection_group_table( + conn, + data_collection_group_info, + ispyb_ids.data_collection_group_id, + ) + ) + else: + assert ispyb_ids.data_collection_group_id, "Attempt to update data collection without a data collection group ID" + + grid_ids = list(ispyb_ids.grid_ids) + data_collection_ids_out = list(ispyb_ids.data_collection_ids) + for scan_data_info in scan_data_infos: + data_collection_id = scan_data_info.data_collection_id + if ( + scan_data_info.data_collection_info + and not 
scan_data_info.data_collection_info.parent_id + ): + scan_data_info.data_collection_info.parent_id = ( + ispyb_ids.data_collection_group_id + ) + + new_data_collection_id, grid_id = self._store_single_scan_data( + conn, scan_data_info, data_collection_id + ) + if not data_collection_id: + data_collection_ids_out.append(new_data_collection_id) + if grid_id: + grid_ids.append(grid_id) + ispyb_ids = IspybIds( + data_collection_ids=tuple(data_collection_ids_out), + grid_ids=tuple(grid_ids), + data_collection_group_id=ispyb_ids.data_collection_group_id, + ) + return ispyb_ids + + def end_deposition(self, ispyb_ids: IspybIds, success: str, reason: str): + assert ( + ispyb_ids.data_collection_ids + ), "Can't end ISPyB deposition, data_collection IDs are missing" + assert ( + ispyb_ids.data_collection_group_id is not None + ), "Cannot end ISPyB deposition without data collection group ID" + + for id_ in ispyb_ids.data_collection_ids: + ISPYB_LOGGER.info( + f"End ispyb deposition with status '{success}' and reason '{reason}'." + ) + if success == "fail" or success == "abort": + run_status = "DataCollection Unsuccessful" + else: + run_status = "DataCollection Successful" + current_time = get_current_time_string() + self._update_scan_with_end_time_and_status( + current_time, + run_status, + reason, + id_, + ispyb_ids.data_collection_group_id, + ) + + def append_to_comment( + self, data_collection_id: int, comment: str, delimiter: str = " " + ) -> None: + with ispyb.open(self.ISPYB_CONFIG_PATH) as conn: + assert conn is not None, "Failed to connect to ISPyB!" + mx_acquisition: MXAcquisition = conn.mx_acquisition + mx_acquisition.update_data_collection_append_comments( + data_collection_id, comment, delimiter + ) + + def _update_scan_with_end_time_and_status( + self, + end_time: str, + run_status: str, + reason: str, + data_collection_id: int, + data_collection_group_id: int, + ) -> None: + if reason is not None and reason != "": + self.append_to_comment(data_collection_id, f"{run_status} reason: {reason}") + + with ispyb.open(self.ISPYB_CONFIG_PATH) as conn: + assert conn is not None, "Failed to connect to ISPyB!" 
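            # Editor's note: the update below follows the usual ispyb-api pattern,
            # sketched here for clarity (it mirrors the code that follows, no new
            # behaviour is assumed): fetch the StrictOrderedDict template, fill in
            # the fields we know, then pass the values back in column order.
            #   params = mx_acquisition.get_data_collection_params()
            #   params["id"] = data_collection_id
            #   params["endtime"] = end_time
            #   mx_acquisition.upsert_data_collection(list(params.values()))
            # The data collection group, grid and position tables below are written
            # with the same fetch-template/fill/upsert approach.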
+ + mx_acquisition: MXAcquisition = conn.mx_acquisition + + params = mx_acquisition.get_data_collection_params() + params["id"] = data_collection_id + params["parentid"] = data_collection_group_id + params["endtime"] = end_time + params["run_status"] = run_status + + mx_acquisition.upsert_data_collection(list(params.values())) + + def _store_position_table( + self, conn: Connector, dc_pos_info, data_collection_id + ) -> int: + mx_acquisition: MXAcquisition = conn.mx_acquisition + + params = mx_acquisition.get_dc_position_params() + params["id"] = data_collection_id + params |= asdict(dc_pos_info) + + return mx_acquisition.update_dc_position(list(params.values())) + + def _store_data_collection_group_table( + self, + conn: Connector, + dcg_info: DataCollectionGroupInfo, + data_collection_group_id: int | None = None, + ) -> int: + mx_acquisition: MXAcquisition = conn.mx_acquisition + + params = mx_acquisition.get_data_collection_group_params() + if data_collection_group_id: + params["id"] = data_collection_group_id + params["parent_id"] = get_session_id_from_visit(conn, dcg_info.visit_string) + params |= {k: v for k, v in asdict(dcg_info).items() if k != "visit_string"} + + return self._upsert_data_collection_group(conn, params) + + def _store_data_collection_table( + self, conn, data_collection_id, data_collection_info + ): + params = self._fill_common_data_collection_params( + conn, data_collection_id, data_collection_info + ) + return self._upsert_data_collection(conn, params) + + def _store_single_scan_data( + self, conn, scan_data_info, data_collection_id=None + ) -> tuple[int, int | None]: + data_collection_id = self._store_data_collection_table( + conn, data_collection_id, scan_data_info.data_collection_info + ) + + if scan_data_info.data_collection_position_info: + self._store_position_table( + conn, + scan_data_info.data_collection_position_info, + data_collection_id, + ) + + grid_id = None + if scan_data_info.data_collection_grid_info: + grid_id = self._store_grid_info_table( + conn, + data_collection_id, + scan_data_info.data_collection_grid_info, + ) + return data_collection_id, grid_id + + def _store_grid_info_table( + self, + conn: Connector, + ispyb_data_collection_id: int, + dc_grid_info: DataCollectionGridInfo, + ) -> int: + mx_acquisition: MXAcquisition = conn.mx_acquisition + params = mx_acquisition.get_dc_grid_params() + params |= dc_grid_info.as_dict() + params["parentid"] = ispyb_data_collection_id + return mx_acquisition.upsert_dc_grid(list(params.values())) + + def _fill_common_data_collection_params( + self, conn, data_collection_id, data_collection_info: DataCollectionInfo + ) -> StrictOrderedDict: + mx_acquisition: MXAcquisition = conn.mx_acquisition + params = mx_acquisition.get_data_collection_params() + + if data_collection_id: + params["id"] = data_collection_id + if data_collection_info.visit_string: + # This is only needed for populating the DataCollectionGroup + params["visit_id"] = get_session_id_from_visit( + conn, data_collection_info.visit_string + ) + params |= { + k: v for k, v in asdict(data_collection_info).items() if k != "visit_string" + } + + return params + + @staticmethod + @TRACER.start_as_current_span("_upsert_data_collection_group") + def _upsert_data_collection_group( + conn: Connector, params: StrictOrderedDict + ) -> int: + return conn.mx_acquisition.upsert_data_collection_group(list(params.values())) + + @staticmethod + @TRACER.start_as_current_span("_upsert_data_collection") + def _upsert_data_collection(conn: Connector, params: 
StrictOrderedDict) -> int: + return conn.mx_acquisition.upsert_data_collection(list(params.values())) diff --git a/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_utils.py b/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_utils.py new file mode 100644 index 000000000..f7dd09af7 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/ispyb/ispyb_utils.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +import datetime +import os + +from ispyb import NoResult +from ispyb.connector.mysqlsp.main import ISPyBMySQLSPConnector as Connector +from ispyb.sp.core import Core + +from mx_bluesky.hyperion.parameters.constants import CONST + +VISIT_PATH_REGEX = r".+/([a-zA-Z]{2}\d{4,5}-\d{1,3})(/?$)" + + +def get_ispyb_config(): + return os.environ.get("ISPYB_CONFIG_PATH", CONST.SIM.ISPYB_CONFIG) + + +def get_session_id_from_visit(conn: Connector, visit: str): + try: + core: Core = conn.core + return core.retrieve_visit_id(visit) + except NoResult as e: + raise NoResult(f"No session ID found in ispyb for visit {visit}") from e + + +def get_current_time_string(): + now = datetime.datetime.now() + return now.strftime("%Y-%m-%d %H:%M:%S") diff --git a/src/mx_bluesky/hyperion/external_interaction/nexus/__init__.py b/src/mx_bluesky/hyperion/external_interaction/nexus/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/mx_bluesky/hyperion/external_interaction/nexus/nexus_utils.py b/src/mx_bluesky/hyperion/external_interaction/nexus/nexus_utils.py new file mode 100644 index 000000000..f8f1186a9 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/nexus/nexus_utils.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +import time +from datetime import datetime, timedelta + +import numpy as np +from dodal.devices.detector import DetectorParams +from dodal.devices.zebra import RotationDirection +from nexgen.nxs_utils import Attenuator, Axis, Beam, Detector, EigerDetector, Goniometer +from nexgen.nxs_utils.axes import TransformationType +from numpy.typing import DTypeLike + +from mx_bluesky.hyperion.log import NEXUS_LOGGER +from mx_bluesky.hyperion.utils.utils import convert_eV_to_angstrom + + +def vds_type_based_on_bit_depth(detector_bit_depth: int) -> DTypeLike: + """Works out the datatype for the VDS, based on the bit depth from the detector.""" + if detector_bit_depth == 8: + return np.uint8 + elif detector_bit_depth == 16: + return np.uint16 + elif detector_bit_depth == 32: + return np.uint32 + else: + NEXUS_LOGGER.error( + f"Unknown detector bit depth {detector_bit_depth}, assuming 16-bit" + ) + return np.uint16 + + +def create_goniometer_axes( + omega_start: float, + scan_points: dict | None, + x_y_z_increments: tuple[float, float, float] = (0.0, 0.0, 0.0), + chi: float = 0.0, + phi: float = 0.0, + rotation_direction: RotationDirection = RotationDirection.NEGATIVE, +): + """Returns a Nexgen 'Goniometer' object with the dependency chain of I03's Smargon + goniometer. If scan points is provided these values will be used in preference to + those from the params object. + + Args: + omega_start (float): the starting position of omega, the only extra value that + needs to be specified except for the scan points. + scan_points (dict): a dictionary of points in the scan for each axis. Obtained + by calculating the scan path with scanspec and calling + consume() on it. + x_y_z_increments: optionally, specify the increments between each image for + the x, y, and z axes. Will be ignored if scan_points + is provided. 
+ """ + gonio_axes = [ + Axis( + "omega", + ".", + TransformationType.ROTATION, + (1.0 * rotation_direction.multiplier, 0.0, 0.0), + omega_start, + ), + Axis( + name="sam_z", + depends="omega", + transformation_type=TransformationType.TRANSLATION, + vector=(0.0, 0.0, 1.0), + start_pos=0.0, + increment=x_y_z_increments[2], + ), + Axis( + name="sam_y", + depends="sam_z", + transformation_type=TransformationType.TRANSLATION, + vector=(0.0, 1.0, 0.0), + start_pos=0.0, + increment=x_y_z_increments[1], + ), + Axis( + name="sam_x", + depends="sam_y", + transformation_type=TransformationType.TRANSLATION, + vector=(1.0, 0.0, 0.0), + start_pos=0.0, + increment=x_y_z_increments[0], + ), + Axis( + "chi", "sam_x", TransformationType.ROTATION, (0.006, -0.0264, 0.9996), chi + ), + Axis("phi", "chi", TransformationType.ROTATION, (-1, -0.0025, -0.0056), phi), + ] + return Goniometer(gonio_axes, scan_points) + + +def get_start_and_predicted_end_time(time_expected: float) -> tuple[str, str]: + time_format = r"%Y-%m-%dT%H:%M:%SZ" + start = datetime.utcfromtimestamp(time.time()) + end_est = start + timedelta(seconds=time_expected) + return start.strftime(time_format), end_est.strftime(time_format) + + +def create_detector_parameters(detector_params: DetectorParams) -> Detector: + """Returns the detector information in a format that nexgen wants. + + Args: + detector_params (DetectorParams): The detector params as Hyperion stores them. + + Returns: + Detector: Detector description for nexgen. + """ + detector_pixels = detector_params.get_detector_size_pizels() + + eiger_params = EigerDetector( + "Eiger 16M", (detector_pixels.height, detector_pixels.width), "Si", 46051, 0 + ) + + detector_axes = [ + Axis( + "det_z", + ".", + TransformationType.TRANSLATION, + (0.0, 0.0, 1.0), + detector_params.detector_distance, + ) + ] + # Eiger parameters, axes, beam_center, exp_time, [fast, slow] + return Detector( + eiger_params, + detector_axes, + list( + detector_params.get_beam_position_pixels(detector_params.detector_distance) + ), + detector_params.exposure_time, + [(-1.0, 0.0, 0.0), (0.0, -1.0, 0.0)], + ) + + +def create_beam_and_attenuator_parameters( + energy_kev: float, flux: float, transmission_fraction: float +) -> tuple[Beam, Attenuator]: + """Create beam and attenuator objects that nexgen can understands + + Returns: + tuple[Beam, Attenuator]: Descriptions of the beam and attenuator for nexgen. + """ + return ( + Beam(convert_eV_to_angstrom(energy_kev * 1000), flux), # pyright: ignore + Attenuator(transmission_fraction), # pyright: ignore + ) diff --git a/src/mx_bluesky/hyperion/external_interaction/nexus/write_nexus.py b/src/mx_bluesky/hyperion/external_interaction/nexus/write_nexus.py new file mode 100644 index 000000000..40d90ae00 --- /dev/null +++ b/src/mx_bluesky/hyperion/external_interaction/nexus/write_nexus.py @@ -0,0 +1,114 @@ +""" +Define beamline parameters for I03, Eiger detector and give an example of writing a +gridscan. 
+""" + +from __future__ import annotations + +import math +from pathlib import Path + +from dodal.devices.zebra import RotationDirection +from dodal.utils import get_beamline_name +from nexgen.nxs_utils import Attenuator, Beam, Detector, Goniometer, Source +from nexgen.nxs_write.nxmx_writer import NXmxFileWriter +from numpy.typing import DTypeLike +from scanspec.core import AxesPoints + +from mx_bluesky.hyperion.external_interaction.nexus.nexus_utils import ( + create_detector_parameters, + create_goniometer_axes, + get_start_and_predicted_end_time, +) +from mx_bluesky.hyperion.parameters.components import DiffractionExperimentWithSample + + +class NexusWriter: + def __init__( + self, + parameters: DiffractionExperimentWithSample, + data_shape: tuple[int, int, int], + scan_points: AxesPoints, + *, + run_number: int | None = None, + omega_start_deg: float = 0, + chi_start_deg: float = 0, + phi_start_deg: float = 0, + vds_start_index: int = 0, + # override default values when there is more than one collection per + # detector arming event: + full_num_of_images: int | None = None, + meta_data_run_number: int | None = None, + rotation_direction: RotationDirection = RotationDirection.NEGATIVE, + ) -> None: + self.beam: Beam | None = None + self.attenuator: Attenuator | None = None + self.scan_points: dict = scan_points + self.data_shape: tuple[int, int, int] = data_shape + self.run_number: int = ( + run_number if run_number else parameters.detector_params.run_number + ) + self.detector: Detector = create_detector_parameters(parameters.detector_params) + self.source: Source = Source(get_beamline_name("S03")) + self.directory: Path = Path(parameters.storage_directory) + self.start_index: int = vds_start_index + self.full_num_of_images: int = full_num_of_images or parameters.num_images + self.data_filename: str = ( + f"{parameters.file_name}_{meta_data_run_number}" + if meta_data_run_number + else parameters.detector_params.full_filename + ) + self.nexus_file: Path = ( + self.directory / f"{parameters.file_name}_{self.run_number}.nxs" + ) + self.master_file: Path = ( + self.directory / f"{parameters.file_name}_{self.run_number}_master.h5" + ) + self.goniometer: Goniometer = create_goniometer_axes( + omega_start_deg, + self.scan_points, + chi=chi_start_deg, + phi=phi_start_deg, + rotation_direction=rotation_direction, + ) + + def create_nexus_file(self, bit_depth: DTypeLike): + """ + Creates a nexus file based on the parameters supplied when this object was + initialised. 
+ """ + start_time, est_end_time = get_start_and_predicted_end_time( + self.detector.exp_time * self.full_num_of_images + ) + + assert self.beam is not None + assert self.attenuator is not None + + vds_shape = self.data_shape + + for filename in [self.nexus_file, self.master_file]: + NXmx_Writer = NXmxFileWriter( + filename, + self.goniometer, + self.detector, + self.source, + self.beam, + self.attenuator, + self.full_num_of_images, + ) + NXmx_Writer.write( + image_filename=f"{self.data_filename}", + start_time=start_time, + est_end_time=est_end_time, + ) + NXmx_Writer.write_vds( + vds_offset=self.start_index, vds_shape=vds_shape, vds_dtype=bit_depth + ) + + def get_image_datafiles(self, max_images_per_file=1000): + return [ + self.directory / f"{self.data_filename}_{h5_num + 1:06}.h5" + for h5_num in range( + math.ceil(self.full_num_of_images / max_images_per_file) + ) + ] diff --git a/src/mx_bluesky/hyperion/log.py b/src/mx_bluesky/hyperion/log.py new file mode 100755 index 000000000..04f200f83 --- /dev/null +++ b/src/mx_bluesky/hyperion/log.py @@ -0,0 +1,99 @@ +import logging +from logging.handlers import TimedRotatingFileHandler +from os import environ +from pathlib import Path + +from dodal.log import ( + ERROR_LOG_BUFFER_LINES, + CircularMemoryHandler, + DodalLogHandlers, + integrate_bluesky_and_ophyd_logging, + set_up_all_logging_handlers, +) +from dodal.log import LOGGER as dodal_logger + +from mx_bluesky.hyperion.parameters.constants import CONST + +LOGGER = logging.getLogger("Hyperion") +LOGGER.setLevel("DEBUG") +LOGGER.parent = dodal_logger +__logger_handlers: DodalLogHandlers | None = None + +ISPYB_LOGGER = logging.getLogger("Hyperion ISPyB and Zocalo callbacks") +ISPYB_LOGGER.setLevel(logging.DEBUG) + +NEXUS_LOGGER = logging.getLogger("Hyperion NeXus callbacks") +NEXUS_LOGGER.setLevel(logging.DEBUG) + +ALL_LOGGERS = [LOGGER, ISPYB_LOGGER, NEXUS_LOGGER] + + +class ExperimentMetadataTagFilter(logging.Filter): + dc_group_id: str | None = None + run_uid: str | None = None + + def filter(self, record): + if self.dc_group_id: + record.dc_group_id = self.dc_group_id + if self.run_uid: + record.run_uid = self.run_uid + return True + + +tag_filter = ExperimentMetadataTagFilter() + + +def set_dcgid_tag(dcgid): + """Set the datacollection group id as a tag on all subsequent log messages. + Setting to None will remove the tag.""" + tag_filter.dc_group_id = dcgid + + +def set_uid_tag(uid): + tag_filter.run_uid = uid + + +def do_default_logging_setup(dev_mode=False): + handlers = set_up_all_logging_handlers( + dodal_logger, + _get_logging_dir(), + "hyperion.log", + dev_mode, + ERROR_LOG_BUFFER_LINES, + CONST.GRAYLOG_PORT, + ) + integrate_bluesky_and_ophyd_logging(dodal_logger) + handlers["graylog_handler"].addFilter(tag_filter) + + global __logger_handlers + __logger_handlers = handlers + + +def _get_debug_handler() -> CircularMemoryHandler: + assert ( + __logger_handlers is not None + ), "You can only use this after running the default logging setup" + return __logger_handlers["debug_memory_handler"] + + +def flush_debug_handler() -> str: + """Writes the contents of the circular debug log buffer to disk and returns the written filename""" + handler = _get_debug_handler() + assert isinstance( + handler.target, TimedRotatingFileHandler + ), "Circular memory handler doesn't have an appropriate fileHandler target" + handler.flush() + return handler.target.baseFilename + + +def _get_logging_dir() -> Path: + """Get the path to write the hyperion log files to. 
+ + If the HYPERION_LOG_DIR environment variable exists then logs will be put in here. + If no environment variable is found it will default it to the ./tmp/dev directory. + + Returns: + logging_path (Path): Path to the log file for the file handler to write to. + """ + logging_path = Path(environ.get("HYPERION_LOG_DIR") or "./tmp/dev/") + return logging_path diff --git a/src/mx_bluesky/hyperion/parameters/__init__.py b/src/mx_bluesky/hyperion/parameters/__init__.py new file mode 100644 index 000000000..40b73128a --- /dev/null +++ b/src/mx_bluesky/hyperion/parameters/__init__.py @@ -0,0 +1,2 @@ +"""This module handles the translation between externally supplied parameters and the +internal parameter model.""" diff --git a/src/mx_bluesky/hyperion/parameters/cli.py b/src/mx_bluesky/hyperion/parameters/cli.py new file mode 100644 index 000000000..6aa3cebb2 --- /dev/null +++ b/src/mx_bluesky/hyperion/parameters/cli.py @@ -0,0 +1,60 @@ +import argparse + +from pydantic.dataclasses import dataclass + + +@dataclass +class HyperionArgs: + dev_mode: bool = False + use_external_callbacks: bool = False + verbose_event_logging: bool = False + skip_startup_connection: bool = False + + +def _add_callback_relevant_args(parser: argparse.ArgumentParser) -> None: + """adds arguments relevant to hyperion-callbacks.""" + parser.add_argument( + "--dev", + action="store_true", + help="Use dev options, such as local graylog instances and S03", + ) + + +def parse_callback_dev_mode_arg() -> bool: + """Returns the bool representing the 'dev_mode' argument.""" + parser = argparse.ArgumentParser() + _add_callback_relevant_args(parser) + args = parser.parse_args() + return args.dev + + +def parse_cli_args() -> HyperionArgs: + """Parses all arguments relevant to hyperion. Returns an HyperionArgs dataclass with + the fields: (verbose_event_logging: bool, + dev_mode: bool, + skip_startup_connection: bool, + external_callbacks: bool)""" + parser = argparse.ArgumentParser() + _add_callback_relevant_args(parser) + parser.add_argument( + "--verbose-event-logging", + action="store_true", + help="Log all bluesky event documents to graylog", + ) + parser.add_argument( + "--skip-startup-connection", + action="store_true", + help="Skip connecting to EPICS PVs on startup", + ) + parser.add_argument( + "--external-callbacks", + action="store_true", + help="Run the external hyperion-callbacks service and publish events over ZMQ", + ) + args = parser.parse_args() + return HyperionArgs( + verbose_event_logging=args.verbose_event_logging or False, + dev_mode=args.dev or False, + skip_startup_connection=args.skip_startup_connection or False, + use_external_callbacks=args.external_callbacks or False, + ) diff --git a/src/mx_bluesky/hyperion/parameters/components.py b/src/mx_bluesky/hyperion/parameters/components.py new file mode 100644 index 000000000..2a750a0ec --- /dev/null +++ b/src/mx_bluesky/hyperion/parameters/components.py @@ -0,0 +1,274 @@ +from __future__ import annotations + +import datetime +import json +from abc import abstractmethod +from collections.abc import Sequence +from enum import StrEnum +from pathlib import Path +from typing import SupportsInt, TypeVar + +from dodal.devices.aperturescatterguard import AperturePositionGDANames +from dodal.devices.detector import ( + DetectorParams, + TriggerMode, +) +from numpy.typing import NDArray +from pydantic import BaseModel, Extra, Field, root_validator, validator +from scanspec.core import AxesPoints +from semver import Version + +from 
mx_bluesky.hyperion.external_interaction.config_server import FeatureFlags +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import ( + IspybParams, +) +from mx_bluesky.hyperion.parameters.constants import CONST + +T = TypeVar("T") + + +class ParameterVersion(Version): + @classmethod + def _parse(cls, version): + if isinstance(version, cls): + return version + return cls.parse(version) + + @classmethod + def __get_validators__(cls): + """Return a list of validator methods for pydantic models.""" + yield cls._parse + + @classmethod + def __modify_schema__(cls, field_schema): + """Inject/mutate the pydantic field schema in-place.""" + field_schema.update(examples=["1.0.2", "2.15.3-alpha", "21.3.15-beta+12345"]) + + +PARAMETER_VERSION = ParameterVersion.parse("5.0.0") + + +class RotationAxis(StrEnum): + OMEGA = "omega" + PHI = "phi" + CHI = "chi" + KAPPA = "kappa" + + +class XyzAxis(StrEnum): + X = "sam_x" + Y = "sam_y" + Z = "sam_z" + + +class IspybExperimentType(StrEnum): + # Enum values from ispyb column data type + SAD = "SAD" # at or slightly above the peak + SAD_INVERSE_BEAM = "SAD - Inverse Beam" + OSC = "OSC" # "native" (in the absence of a heavy atom) + COLLECT_MULTIWEDGE = ( + "Collect - Multiwedge" # "poorly determined" ~ EDNA complex strategy??? + ) + MAD = "MAD" + HELICAL = "Helical" + MULTI_POSITIONAL = "Multi-positional" + MESH = "Mesh" + BURN = "Burn" + MAD_INVERSE_BEAM = "MAD - Inverse Beam" + CHARACTERIZATION = "Characterization" + DEHYDRATION = "Dehydration" + TOMO = "tomo" + EXPERIMENT = "experiment" + EM = "EM" + PDF = "PDF" + PDF_BRAGG = "PDF+Bragg" + BRAGG = "Bragg" + SINGLE_PARTICLE = "single particle" + SERIAL_FIXED = "Serial Fixed" + SERIAL_JET = "Serial Jet" + STANDARD = "Standard" # Routine structure determination experiment + TIME_RESOLVED = "Time Resolved" # Investigate the change of a system over time + DLS_ANVIL_HP = "Diamond Anvil High Pressure" # HP sample environment pressure cell + CUSTOM = "Custom" # Special or non-standard data collection + XRF_MAP = "XRF map" + ENERGY_SCAN = "Energy scan" + XRF_SPECTRUM = "XRF spectrum" + XRF_MAP_XAS = "XRF map xas" + MESH_3D = "Mesh3D" + SCREENING = "Screening" + STILL = "Still" + SSX_CHIP = "SSX-Chip" + SSX_JET = "SSX-Jet" + + # Aliases for historic hyperion experiment type mapping + ROTATION = "SAD" + GRIDSCAN_2D = "mesh" + GRIDSCAN_3D = "Mesh3D" + + +class HyperionParameters(BaseModel): + class Config: + arbitrary_types_allowed = True + extra = Extra.forbid + json_encoders = { + ParameterVersion: lambda pv: str(pv), + NDArray: lambda a: a.tolist(), + } + + def __hash__(self) -> int: + return self.json().__hash__() + + features: FeatureFlags = Field(default=FeatureFlags()) + parameter_model_version: ParameterVersion + + @validator("parameter_model_version") + def _validate_version(cls, version: ParameterVersion): + assert ( + version >= ParameterVersion(major=PARAMETER_VERSION.major) + ), f"Parameter version too old! This version of hyperion uses {PARAMETER_VERSION}" + assert ( + version <= ParameterVersion(major=PARAMETER_VERSION.major + 1) + ), f"Parameter version too new! 
This version of hyperion uses {PARAMETER_VERSION}" + return version + + @classmethod + def from_json(cls, input: str | None, *, allow_extras: bool = False): + assert input is not None + if allow_extras: + cls.Config.extra = Extra.ignore # type: ignore + params = cls(**json.loads(input)) + cls.Config.extra = Extra.forbid + return params + + +class WithSnapshot(BaseModel): + snapshot_directory: Path + snapshot_omegas_deg: list[float] | None + + @property + def take_snapshots(self) -> bool: + return bool(self.snapshot_omegas_deg) + + +class DiffractionExperiment(HyperionParameters, WithSnapshot): + """For all experiments which use beam""" + + visit: str = Field(min_length=1) + file_name: str = Field(pattern=r"[\w]{2}[\d]+-[\d]+") + exposure_time_s: float = Field(gt=0) + comment: str = Field(default="") + beamline: str = Field(default=CONST.I03.BEAMLINE, pattern=r"BL\d{2}[BIJS]") + insertion_prefix: str = Field( + default=CONST.I03.INSERTION_PREFIX, pattern=r"SR\d{2}[BIJS]" + ) + det_dist_to_beam_converter_path: str = Field( + default=CONST.PARAM.DETECTOR.BEAM_XY_LUT_PATH + ) + zocalo_environment: str = Field(default=CONST.ZOCALO_ENV) + trigger_mode: TriggerMode = Field(default=TriggerMode.FREE_RUN) + detector_distance_mm: float | None = Field(default=None, gt=0) + demand_energy_ev: float | None = Field(default=None, gt=0) + run_number: int | None = Field(default=None, ge=0) + selected_aperture: AperturePositionGDANames | None = Field(default=None) + transmission_frac: float = Field(default=0.1) + ispyb_experiment_type: IspybExperimentType + storage_directory: str + + @root_validator(pre=True) + def validate_snapshot_directory(cls, values): + snapshot_dir = values.get( + "snapshot_directory", Path(values["storage_directory"], "snapshots") + ) + values["snapshot_directory"] = ( + snapshot_dir if isinstance(snapshot_dir, Path) else Path(snapshot_dir) + ) + return values + + @property + def visit_directory(self) -> Path: + return ( + Path(CONST.I03.BASE_DATA_DIR) / str(datetime.date.today().year) / self.visit + ) + + @property + def num_images(self) -> int: + return 0 + + @property + @abstractmethod + def detector_params(self) -> DetectorParams: ... + + @property + @abstractmethod + def ispyb_params(self) -> IspybParams: # Soon to remove + ... + + +class WithScan(BaseModel): + """For experiments where the scan is known""" + + @property + @abstractmethod + def scan_points(self) -> AxesPoints: ... + + @property + @abstractmethod + def num_images(self) -> int: ... + + +class SplitScan(BaseModel): + @property + @abstractmethod + def scan_indices(self) -> Sequence[SupportsInt]: + """Should return the first index of each scan (i.e. for each nexus file)""" + ... + + +class WithSample(BaseModel): + sample_id: int + sample_puck: int | None = None + sample_pin: int | None = None + + +class DiffractionExperimentWithSample(DiffractionExperiment, WithSample): ... 
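# Editor's sketch (hypothetical subclass, not part of this change): the mixins in
# this module are intended to be composed by concrete experiment models, which then
# implement the abstract properties, roughly:
#
#   class ExampleScan(DiffractionExperimentWithSample, XyzStarts, WithScan):
#       x_steps: int = Field(gt=0)
#
#       @property
#       def detector_params(self) -> DetectorParams: ...
#
#       @property
#       def ispyb_params(self) -> IspybParams: ...
#
#       @property
#       def scan_points(self) -> AxesPoints: ...
#
#       @property
#       def num_images(self) -> int:
#           return self.x_steps
#
# The real compositions are in mx_bluesky.hyperion.parameters.gridscan, added later
# in this change.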
+ + +class WithOavCentring(BaseModel): + oav_centring_file: str = Field(default=CONST.I03.OAV_CENTRING_FILE) + + +class OptionalXyzStarts(BaseModel): + x_start_um: float | None = None + y_start_um: float | None = None + z_start_um: float | None = None + + +class XyzStarts(BaseModel): + x_start_um: float + y_start_um: float + z_start_um: float + + def _start_for_axis(self, axis: XyzAxis) -> float: + match axis: + case XyzAxis.X: + return self.x_start_um + case XyzAxis.Y: + return self.y_start_um + case XyzAxis.Z: + return self.z_start_um + + +class OptionalGonioAngleStarts(BaseModel): + omega_start_deg: float | None = None + phi_start_deg: float | None = None + chi_start_deg: float | None = None + kappa_start_deg: float | None = None + + +class TemporaryIspybExtras(BaseModel): + # for while we still need ISpyB params - to be removed in #1277 and/or #43 + class Config: + arbitrary_types_allowed = True + extra = Extra.forbid + + xtal_snapshots_omega_start: list[str] | None = None diff --git a/src/mx_bluesky/hyperion/parameters/constants.py b/src/mx_bluesky/hyperion/parameters/constants.py new file mode 100644 index 000000000..98539b5c3 --- /dev/null +++ b/src/mx_bluesky/hyperion/parameters/constants.py @@ -0,0 +1,149 @@ +import os +from enum import Enum + +from dodal.devices.detector import EIGER2_X_16M_SIZE +from pydantic.dataclasses import dataclass + +TEST_MODE = os.environ.get("HYPERION_TEST_MODE") + + +@dataclass(frozen=True) +class SimConstants: + BEAMLINE = "BL03S" + INSERTION_PREFIX = "SR03S" + ZOCALO_ENV = "dev_artemis" + # this one is for unit tests + ISPYB_CONFIG = "tests/test_data/test_config.cfg" + # this one is for system tests + DEV_ISPYB_DATABASE_CFG = "/dls_sw/dasc/mariadb/credentials/ispyb-hyperion-dev.cfg" + + +@dataclass(frozen=True) +class PlanNameConstants: + # Robot load subplan + ROBOT_LOAD = "robot_load" + # Gridscan + GRID_DETECT_AND_DO_GRIDSCAN = "grid_detect_and_do_gridscan" + GRID_DETECT_INNER = "grid_detect" + GRIDSCAN_OUTER = "run_gridscan_move_and_tidy" + GRIDSCAN_AND_MOVE = "run_gridscan_and_move" + GRIDSCAN_MAIN = "run_gridscan" + DO_FGS = "do_fgs" + # Rotation scan + ROTATION_MULTI = "multi_rotation_wrapper" + ROTATION_OUTER = "rotation_scan_with_cleanup" + ROTATION_MAIN = "rotation_scan_main" + + +@dataclass(frozen=True) +class PlanGroupCheckpointConstants: + # For places to synchronise / stop and wait in plans, use as bluesky group names + # Gridscan + GRID_READY_FOR_DC = "ready_for_data_collection" + MOVE_GONIO_TO_START = "move_gonio_to_start" + + +@dataclass(frozen=True) +class DocDescriptorNames: + # Robot load event descriptor + ROBOT_LOAD = "robot_load" + # For callbacks to use + OAV_ROTATION_SNAPSHOT_TRIGGERED = "rotation_snapshot_triggered" + OAV_GRID_SNAPSHOT_TRIGGERED = "snapshot_to_ispyb" + HARDWARE_READ_PRE = "read_hardware_for_callbacks_pre_collection" + HARDWARE_READ_DURING = "read_hardware_for_callbacks_during_collection" + ZOCALO_HW_READ = "zocalo_read_hardware_plan" + + +@dataclass(frozen=True) +class HardwareConstants: + OAV_REFRESH_DELAY = 0.3 + PANDA_FGS_RUN_UP_DEFAULT = 0.17 + CRYOJET_MARGIN_MM = 0.2 + + +@dataclass(frozen=True) +class TriggerConstants: + ZOCALO = "trigger_zocalo_on" + + +@dataclass(frozen=True) +class GridscanParamConstants: + WIDTH_UM = 600.0 + EXPOSURE_TIME_S = 0.02 + USE_ROI = True + BOX_WIDTH_UM = 20.0 + OMEGA_1 = 0.0 + OMEGA_2 = 90.0 + + +@dataclass(frozen=True) +class DetectorParamConstants: + BEAM_XY_LUT_PATH = ( + "tests/test_data/test_det_dist_converter.txt" + if TEST_MODE + else 
"/dls_sw/i03/software/daq_configuration/lookup/DetDistToBeamXYConverter.txt" + ) + + +@dataclass(frozen=True) +class ExperimentParamConstants: + DETECTOR = DetectorParamConstants() + GRIDSCAN = GridscanParamConstants() + + +_test_oav_file = "tests/test_data/test_OAVCentring.json" +_live_oav_file = "/dls_sw/i03/software/daq_configuration/json/OAVCentring_hyperion.json" + + +@dataclass(frozen=True) +class I03Constants: + BASE_DATA_DIR = "/tmp/dls/i03/data/" if TEST_MODE else "/dls/i03/data/" + BEAMLINE = "BL03S" if TEST_MODE else "BL03I" + DETECTOR = EIGER2_X_16M_SIZE + INSERTION_PREFIX = "SR03S" if TEST_MODE else "SR03I" + OAV_CENTRING_FILE = _test_oav_file if TEST_MODE else _live_oav_file + SHUTTER_TIME_S = 0.06 + USE_PANDA_FOR_GRIDSCAN = False + USE_GPU_FOR_GRIDSCAN_ANALYSIS = False + THAWING_TIME = 20 + + +@dataclass(frozen=True) +class HyperionConstants: + HARDWARE = HardwareConstants() + I03 = I03Constants() + PARAM = ExperimentParamConstants() + PLAN = PlanNameConstants() + WAIT = PlanGroupCheckpointConstants() + SIM = SimConstants() + TRIGGER = TriggerConstants() + CALLBACK_0MQ_PROXY_PORTS = (5577, 5578) + DESCRIPTORS = DocDescriptorNames() + CONFIG_SERVER_URL = ( + "http://fake-url-not-real" + if TEST_MODE + else "https://daq-config.diamond.ac.uk/api" + ) + GRAYLOG_PORT = 12232 + PARAMETER_SCHEMA_DIRECTORY = "src/hyperion/parameters/schemas/" + ZOCALO_ENV = "dev_artemis" if TEST_MODE else "artemis" + + +CONST = HyperionConstants() + + +class Actions(Enum): + START = "start" + STOP = "stop" + SHUTDOWN = "shutdown" + STATUS = "status" + + +class Status(Enum): + WARN = "Warn" + FAILED = "Failed" + SUCCESS = "Success" + BUSY = "Busy" + ABORTING = "Aborting" + IDLE = "Idle" diff --git a/src/mx_bluesky/hyperion/parameters/gridscan.py b/src/mx_bluesky/hyperion/parameters/gridscan.py new file mode 100644 index 000000000..a48cd5b4f --- /dev/null +++ b/src/mx_bluesky/hyperion/parameters/gridscan.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import os + +from dodal.devices.aperturescatterguard import AperturePositionGDANames +from dodal.devices.detector import ( + DetectorDistanceToBeamXYConverter, + DetectorParams, +) +from dodal.devices.fast_grid_scan import ( + PandAGridScanParams, + ZebraGridScanParams, +) +from pydantic import Field, PrivateAttr +from scanspec.core import Path as ScanPath +from scanspec.specs import Line, Static + +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import ( + GridscanIspybParams, +) +from mx_bluesky.hyperion.parameters.components import ( + DiffractionExperimentWithSample, + IspybExperimentType, + OptionalGonioAngleStarts, + SplitScan, + WithOavCentring, + WithScan, + XyzStarts, +) +from mx_bluesky.hyperion.parameters.constants import CONST, I03Constants + + +class GridCommon( + DiffractionExperimentWithSample, OptionalGonioAngleStarts, WithOavCentring +): + grid_width_um: float = Field(default=CONST.PARAM.GRIDSCAN.WIDTH_UM) + exposure_time_s: float = Field(default=CONST.PARAM.GRIDSCAN.EXPOSURE_TIME_S) + use_roi_mode: bool = Field(default=CONST.PARAM.GRIDSCAN.USE_ROI) + panda_runup_distance_mm: float = Field( + default=CONST.HARDWARE.PANDA_FGS_RUN_UP_DEFAULT + ) + use_panda: bool = Field(default=CONST.I03.USE_PANDA_FOR_GRIDSCAN) + use_gpu: bool = Field(default=CONST.I03.USE_GPU_FOR_GRIDSCAN_ANALYSIS) + ispyb_experiment_type: IspybExperimentType = Field( + default=IspybExperimentType.GRIDSCAN_3D + ) + selected_aperture: AperturePositionGDANames | None = Field( + default=AperturePositionGDANames.SMALL_APERTURE + ) + + 
@property + def ispyb_params(self): + return GridscanIspybParams( + visit_path=str(self.visit_directory), + comment=self.comment, + sample_id=self.sample_id, + ispyb_experiment_type=self.ispyb_experiment_type, + position=None, + ) + + @property + def detector_params(self): + self.det_dist_to_beam_converter_path = ( + self.det_dist_to_beam_converter_path + or CONST.PARAM.DETECTOR.BEAM_XY_LUT_PATH + ) + optional_args = {} + if self.run_number: + optional_args["run_number"] = self.run_number + assert ( + self.detector_distance_mm is not None + ), "Detector distance must be filled before generating DetectorParams" + os.makedirs(self.storage_directory, exist_ok=True) + return DetectorParams( + detector_size_constants=I03Constants.DETECTOR, + expected_energy_ev=self.demand_energy_ev, + exposure_time=self.exposure_time_s, + directory=self.storage_directory, + prefix=self.file_name, + detector_distance=self.detector_distance_mm, + omega_start=self.omega_start_deg or 0, + omega_increment=0, + num_images_per_trigger=1, + num_triggers=self.num_images, + use_roi_mode=self.use_roi_mode, + det_dist_to_beam_converter_path=self.det_dist_to_beam_converter_path, + trigger_mode=self.trigger_mode, + beam_xy_converter=DetectorDistanceToBeamXYConverter( + self.det_dist_to_beam_converter_path + ), + enable_dev_shm=self.use_gpu, + **optional_args, + ) + + +class GridScanWithEdgeDetect(GridCommon): ... + + +class PinTipCentreThenXrayCentre(GridCommon): + tip_offset_um: float = 0 + + +class RobotLoadThenCentre(GridCommon): + thawing_time: float = Field(default=CONST.I03.THAWING_TIME) + + def pin_centre_then_xray_centre_params(self): + my_params = self.dict() + del my_params["thawing_time"] + return PinTipCentreThenXrayCentre(**my_params) + + +class SpecifiedGridScan(GridCommon, XyzStarts, WithScan): + """A specified grid scan is one which has defined values for the start position, + grid and box sizes, etc., as opposed to parameters for a plan which will create + those parameters at some point (e.g. through optical pin detection).""" + + ... 
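# Editor's note: a worked sketch with invented numbers (assuming num_images is the
# total over both grids, which is how the nexus callback consumes it). For the
# ThreeDGridScan below, grid 1 contains x_steps * y_steps images and grid 2
# contains x_steps * z_steps, so with x_steps=4, y_steps=3, z_steps=2:
#   scan_indices -> [0, 12]   # first frame index of each grid / nexus file
#   scan_spec    -> grid_1_spec.concat(grid_2_spec), 12 + 8 = 20 points in total
# scan_indices[1] is what GridscanNexusFileCallback uses as the VDS start index
# for the second nexus file.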
+ + +class ThreeDGridScan(SpecifiedGridScan, SplitScan): + """Parameters representing a so-called 3D grid scan, which consists of doing a + gridscan in X and Y, followed by one in X and Z.""" + + demand_energy_ev: float | None = Field(default=None) + grid1_omega_deg: float = Field(default=CONST.PARAM.GRIDSCAN.OMEGA_1) # type: ignore + grid2_omega_deg: float = Field(default=CONST.PARAM.GRIDSCAN.OMEGA_2) + x_step_size_um: float = Field(default=CONST.PARAM.GRIDSCAN.BOX_WIDTH_UM) + y_step_size_um: float = Field(default=CONST.PARAM.GRIDSCAN.BOX_WIDTH_UM) + z_step_size_um: float = Field(default=CONST.PARAM.GRIDSCAN.BOX_WIDTH_UM) + y2_start_um: float + z2_start_um: float + x_steps: int = Field(gt=0) + y_steps: int = Field(gt=0) + z_steps: int = Field(gt=0) + _set_stub_offsets: bool = PrivateAttr(default_factory=lambda: False) + + @property + def FGS_params(self) -> ZebraGridScanParams: + return ZebraGridScanParams( + x_steps=self.x_steps, + y_steps=self.y_steps, + z_steps=self.z_steps, + x_step_size=self.x_step_size_um, + y_step_size=self.y_step_size_um, + z_step_size=self.z_step_size_um, + x_start=self.x_start_um, + y1_start=self.y_start_um, + z1_start=self.z_start_um, + y2_start=self.y2_start_um, + z2_start=self.z2_start_um, + set_stub_offsets=self.features.set_stub_offsets, + dwell_time_ms=self.exposure_time_s * 1000, + transmission_fraction=self.transmission_frac, + ) + + @property + def panda_FGS_params(self) -> PandAGridScanParams: + if self.y_steps % 2 and self.z_steps > 0: + raise OddYStepsException( + "The number of Y steps must be even for a PandA gridscan" + ) + return PandAGridScanParams( + x_steps=self.x_steps, + y_steps=self.y_steps, + z_steps=self.z_steps, + x_step_size=self.x_step_size_um, + y_step_size=self.y_step_size_um, + z_step_size=self.z_step_size_um, + x_start=self.x_start_um, + y1_start=self.y_start_um, + z1_start=self.z_start_um, + y2_start=self.y2_start_um, + z2_start=self.z2_start_um, + set_stub_offsets=self.features.set_stub_offsets, + run_up_distance_mm=self.panda_runup_distance_mm, + transmission_fraction=self.transmission_frac, + ) + + def do_set_stub_offsets(self, value: bool): + self._set_stub_offsets = value + + @property + def grid_1_spec(self): + x_end = self.x_start_um + self.x_step_size_um * (self.x_steps - 1) + y1_end = self.y_start_um + self.y_step_size_um * (self.y_steps - 1) + grid_1_x = Line("sam_x", self.x_start_um, x_end, self.x_steps) + grid_1_y = Line("sam_y", self.y_start_um, y1_end, self.y_steps) + grid_1_z = Static("sam_z", self.z_start_um) + return grid_1_y.zip(grid_1_z) * ~grid_1_x + + @property + def grid_2_spec(self): + x_end = self.x_start_um + self.x_step_size_um * (self.x_steps - 1) + z2_end = self.z2_start_um + self.z_step_size_um * (self.z_steps - 1) + grid_2_x = Line("sam_x", self.x_start_um, x_end, self.x_steps) + grid_2_z = Line("sam_z", self.z2_start_um, z2_end, self.z_steps) + grid_2_y = Static("sam_y", self.y2_start_um) + return grid_2_z.zip(grid_2_y) * ~grid_2_x + + @property + def scan_indices(self): + """The first index of each gridscan, useful for writing nexus files/VDS""" + return [ + 0, + len(ScanPath(self.grid_1_spec.calculate()).consume().midpoints["sam_x"]), + ] + + @property + def scan_spec(self): + """A fully specified ScanSpec object representing both grids, with x, y, z and + omega positions.""" + return self.grid_1_spec.concat(self.grid_2_spec) + + @property + def scan_points(self): + """A list of all the points in the scan_spec.""" + return ScanPath(self.scan_spec.calculate()).consume().midpoints + + @property + def 
scan_points_first_grid(self): + """A list of all the points in the first grid scan.""" + return ScanPath(self.grid_1_spec.calculate()).consume().midpoints + + @property + def scan_points_second_grid(self): + """A list of all the points in the second grid scan.""" + return ScanPath(self.grid_2_spec.calculate()).consume().midpoints + + @property + def num_images(self) -> int: + return len(self.scan_points["sam_x"]) + + +class OddYStepsException(Exception): ... diff --git a/src/mx_bluesky/hyperion/parameters/rotation.py b/src/mx_bluesky/hyperion/parameters/rotation.py new file mode 100644 index 000000000..63a472dac --- /dev/null +++ b/src/mx_bluesky/hyperion/parameters/rotation.py @@ -0,0 +1,166 @@ +from __future__ import annotations + +import os +from collections.abc import Iterator +from itertools import accumulate +from typing import Annotated + +from annotated_types import Len +from dodal.devices.detector import DetectorParams +from dodal.devices.detector.det_dist_to_beam_converter import ( + DetectorDistanceToBeamXYConverter, +) +from dodal.devices.zebra import ( + RotationDirection, +) +from pydantic import Field, root_validator +from scanspec.core import AxesPoints +from scanspec.core import Path as ScanPath +from scanspec.specs import Line + +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import ( + RotationIspybParams, +) +from mx_bluesky.hyperion.parameters.components import ( + DiffractionExperimentWithSample, + IspybExperimentType, + OptionalGonioAngleStarts, + OptionalXyzStarts, + RotationAxis, + SplitScan, + TemporaryIspybExtras, + WithScan, +) +from mx_bluesky.hyperion.parameters.constants import CONST, I03Constants + + +class RotationScanPerSweep(OptionalGonioAngleStarts, OptionalXyzStarts): + omega_start_deg: float = Field(default=0) # type: ignore + rotation_axis: RotationAxis = Field(default=RotationAxis.OMEGA) + scan_width_deg: float = Field(default=360, gt=0) + rotation_direction: RotationDirection = Field(default=RotationDirection.NEGATIVE) + nexus_vds_start_img: int = Field(default=0, ge=0) + ispyb_extras: TemporaryIspybExtras | None + + +class RotationExperiment(DiffractionExperimentWithSample): + shutter_opening_time_s: float = Field(default=CONST.I03.SHUTTER_TIME_S) + rotation_increment_deg: float = Field(default=0.1, gt=0) + ispyb_experiment_type: IspybExperimentType = Field( + default=IspybExperimentType.ROTATION + ) + + def _detector_params(self, omega_start_deg: float): + self.det_dist_to_beam_converter_path: str = ( + self.det_dist_to_beam_converter_path + or CONST.PARAM.DETECTOR.BEAM_XY_LUT_PATH + ) + optional_args = {} + if self.run_number: + optional_args["run_number"] = self.run_number + assert self.detector_distance_mm is not None + os.makedirs(self.storage_directory, exist_ok=True) + return DetectorParams( + detector_size_constants=I03Constants.DETECTOR, + expected_energy_ev=self.demand_energy_ev, + exposure_time=self.exposure_time_s, + directory=self.storage_directory, + prefix=self.file_name, + detector_distance=self.detector_distance_mm, + omega_start=omega_start_deg, + omega_increment=self.rotation_increment_deg, + num_images_per_trigger=self.num_images, + num_triggers=1, + use_roi_mode=False, + det_dist_to_beam_converter_path=self.det_dist_to_beam_converter_path, + beam_xy_converter=DetectorDistanceToBeamXYConverter( + self.det_dist_to_beam_converter_path + ), + **optional_args, + ) + + +class RotationScan(WithScan, RotationScanPerSweep, RotationExperiment): + @property + def ispyb_params(self): # pyright: ignore + return 
RotationIspybParams( + visit_path=str(self.visit_directory), + comment=self.comment, + xtal_snapshots_omega_start=( + self.ispyb_extras.xtal_snapshots_omega_start + if self.ispyb_extras + else [] + ), + ispyb_experiment_type=self.ispyb_experiment_type, + position=None, + ) + + @property + def detector_params(self): + return self._detector_params(self.omega_start_deg) + + @property + def scan_points(self) -> AxesPoints: + scan_spec = Line( + axis="omega", + start=self.omega_start_deg, + stop=( + self.omega_start_deg + + (self.scan_width_deg - self.rotation_increment_deg) + ), + num=self.num_images, + ) + scan_path = ScanPath(scan_spec.calculate()) + return scan_path.consume().midpoints + + @property + def num_images(self) -> int: + return int(self.scan_width_deg / self.rotation_increment_deg) + + +class MultiRotationScan(RotationExperiment, SplitScan): + rotation_scans: Annotated[list[RotationScanPerSweep], Len(min_length=1)] + + def _single_rotation_scan(self, scan: RotationScanPerSweep) -> RotationScan: + # self has everything from RotationExperiment + params = self.dict() + del params["rotation_scans"] + # provided `scan` has everything from RotationScanPerSweep + params.update(scan.dict()) + # together they have everything for RotationScan + return RotationScan(**params) + + @root_validator(pre=False) # type: ignore + def validate_snapshot_directory(cls, values): + start_img = 0 + for scan in values["rotation_scans"]: + scan.nexus_vds_start_img = start_img + start_img += scan.scan_width_deg / values["rotation_increment_deg"] + return values + + @property + def single_rotation_scans(self) -> Iterator[RotationScan]: + for scan in self.rotation_scans: + yield self._single_rotation_scan(scan) + + def _num_images_per_scan(self): + return [ + int(scan.scan_width_deg / self.rotation_increment_deg) + for scan in self.rotation_scans + ] + + @property + def num_images(self): + return sum(self._num_images_per_scan()) + + @property + def scan_indices(self): + return list(accumulate([0, *self._num_images_per_scan()])) + + @property + def detector_params(self): + return self._detector_params(self.rotation_scans[0].omega_start_deg) + + @property + def ispyb_params(self): # pyright: ignore + raise ValueError("Please get ispyb params from one of the individual scans") diff --git a/src/mx_bluesky/hyperion/resources/panda/panda-gridscan.yaml b/src/mx_bluesky/hyperion/resources/panda/panda-gridscan.yaml new file mode 100644 index 000000000..337dad0e9 --- /dev/null +++ b/src/mx_bluesky/hyperion/resources/panda/panda-gridscan.yaml @@ -0,0 +1,964 @@ +- calc.1.out_units: '' + calc.2.out_units: '' + clock.1.period_units: ms + clock.2.period_units: s + counter.1.out_units: '' + counter.2.out_units: '' + counter.3.out_units: '' + counter.4.out_units: '' + counter.5.out_units: '' + counter.6.out_units: '' + counter.7.out_units: '' + counter.8.out_units: '' + filter.1.out_units: '' + filter.2.out_units: '' + inenc.1.clk_period_units: s + inenc.1.frame_period_units: s + inenc.1.val_units: mm + inenc.2.clk_period_units: s + inenc.2.frame_period_units: s + inenc.2.val_units: mm + inenc.3.clk_period_units: s + inenc.3.frame_period_units: s + inenc.3.val_units: mm + inenc.4.clk_period_units: s + inenc.4.frame_period_units: s + inenc.4.val_units: '' + outenc.1.qperiod_units: s + outenc.2.qperiod_units: s + outenc.3.qperiod_units: s + outenc.4.qperiod_units: s + pgen.1.out_units: '' + pgen.2.out_units: '' + pulse.1.delay_units: s + pulse.1.step_units: s + pulse.1.width_units: s + pulse.2.delay_units: s + 
pulse.2.step_units: s + pulse.2.width_units: s + pulse.3.delay_units: s + pulse.3.step_units: s + pulse.3.width_units: s + pulse.4.delay_units: s + pulse.4.step_units: s + pulse.4.width_units: s + seq.1.prescale_units: us + seq.2.prescale_units: s + sfp3_sync_in.pos1_units: '' + sfp3_sync_in.pos2_units: '' + sfp3_sync_in.pos3_units: '' + sfp3_sync_in.pos4_units: '' +- bits.a: '0' + bits.b: '0' + bits.c: '0' + bits.d: '0' + bits.label: Soft inputs and constant bits + calc.1.func: A+B+C+D + calc.1.inpa: ZERO + calc.1.inpb: ZERO + calc.1.inpc: ZERO + calc.1.inpd: ZERO + calc.1.label: Position calc + calc.1.out_capture: 'No' + calc.1.out_dataset: '' + calc.1.out_offset: 0.0 + calc.1.out_scale: 1.0 + calc.1.shift: 0.0 + calc.1.typea: Value + calc.1.typeb: Value + calc.1.typec: Value + calc.1.typed: Value + calc.2.func: A+B+C+D + calc.2.inpa: ZERO + calc.2.inpb: ZERO + calc.2.inpc: ZERO + calc.2.inpd: ZERO + calc.2.label: Position calc + calc.2.out_capture: 'No' + calc.2.out_dataset: '' + calc.2.out_offset: 0.0 + calc.2.out_scale: 1.0 + calc.2.shift: 0.0 + calc.2.typea: Value + calc.2.typeb: Value + calc.2.typec: Value + calc.2.typed: Value + clock.1.enable: ZERO + clock.1.enable_delay: 0 + clock.1.label: Configurable clocks + clock.1.period: 2.001 + clock.2.enable: ZERO + clock.2.enable_delay: 0 + clock.2.label: Configurable clocks + clock.2.period: 0.0 + counter.1.dir: ZERO + counter.1.dir_delay: 0 + counter.1.enable: ZERO + counter.1.enable_delay: 0 + counter.1.label: Up/Down pulse counter + counter.1.max: 0 + counter.1.min: 0 + counter.1.out_capture: 'No' + counter.1.out_dataset: '' + counter.1.out_offset: 0.0 + counter.1.out_scale: 1.0 + counter.1.start: 0 + counter.1.step: 0.0 + counter.1.trig: PULSE1.OUT + counter.1.trig_delay: 0 + counter.2.dir: ZERO + counter.2.dir_delay: 0 + counter.2.enable: ZERO + counter.2.enable_delay: 0 + counter.2.label: Up/Down pulse counter + counter.2.max: 0 + counter.2.min: 0 + counter.2.out_capture: 'No' + counter.2.out_dataset: '' + counter.2.out_offset: 0.0 + counter.2.out_scale: 1.0 + counter.2.start: 0 + counter.2.step: 0.0 + counter.2.trig: ZERO + counter.2.trig_delay: 0 + counter.3.dir: ZERO + counter.3.dir_delay: 0 + counter.3.enable: ZERO + counter.3.enable_delay: 0 + counter.3.label: Up/Down pulse counter + counter.3.max: 0 + counter.3.min: 0 + counter.3.out_capture: 'No' + counter.3.out_dataset: '' + counter.3.out_offset: 0.0 + counter.3.out_scale: 1.0 + counter.3.start: 0 + counter.3.step: 0.0 + counter.3.trig: ZERO + counter.3.trig_delay: 0 + counter.4.dir: ZERO + counter.4.dir_delay: 0 + counter.4.enable: ZERO + counter.4.enable_delay: 0 + counter.4.label: Up/Down pulse counter + counter.4.max: 0 + counter.4.min: 0 + counter.4.out_capture: 'No' + counter.4.out_dataset: '' + counter.4.out_offset: 0.0 + counter.4.out_scale: 1.0 + counter.4.start: 0 + counter.4.step: 0.0 + counter.4.trig: ZERO + counter.4.trig_delay: 0 + counter.5.dir: ZERO + counter.5.dir_delay: 0 + counter.5.enable: ZERO + counter.5.enable_delay: 0 + counter.5.label: Up/Down pulse counter + counter.5.max: 0 + counter.5.min: 0 + counter.5.out_capture: 'No' + counter.5.out_dataset: '' + counter.5.out_offset: 0.0 + counter.5.out_scale: 1.0 + counter.5.start: 0 + counter.5.step: 0.0 + counter.5.trig: ZERO + counter.5.trig_delay: 0 + counter.6.dir: ZERO + counter.6.dir_delay: 0 + counter.6.enable: ZERO + counter.6.enable_delay: 0 + counter.6.label: Up/Down pulse counter + counter.6.max: 0 + counter.6.min: 0 + counter.6.out_capture: 'No' + counter.6.out_dataset: '' + 
counter.6.out_offset: 0.0 + counter.6.out_scale: 1.0 + counter.6.start: 0 + counter.6.step: 0.0 + counter.6.trig: ZERO + counter.6.trig_delay: 0 + counter.7.dir: ZERO + counter.7.dir_delay: 0 + counter.7.enable: ZERO + counter.7.enable_delay: 0 + counter.7.label: Up/Down pulse counter + counter.7.max: 0 + counter.7.min: 0 + counter.7.out_capture: 'No' + counter.7.out_dataset: '' + counter.7.out_offset: 0.0 + counter.7.out_scale: 1.0 + counter.7.start: 0 + counter.7.step: 0.0 + counter.7.trig: ZERO + counter.7.trig_delay: 0 + counter.8.dir: ZERO + counter.8.dir_delay: 0 + counter.8.enable: ZERO + counter.8.enable_delay: 0 + counter.8.label: Up/Down pulse counter + counter.8.max: 0 + counter.8.min: 0 + counter.8.out_capture: 'No' + counter.8.out_dataset: '' + counter.8.out_offset: 0.0 + counter.8.out_scale: 1.0 + counter.8.start: 0 + counter.8.step: 0.0 + counter.8.trig: ZERO + counter.8.trig_delay: 0 + div.1.divisor: 0.0 + div.1.enable: ZERO + div.1.enable_delay: 0 + div.1.first_pulse: OutN + div.1.inp: ZERO + div.1.inp_delay: 0 + div.1.label: Pulse divider + div.2.divisor: 0.0 + div.2.enable: ZERO + div.2.enable_delay: 0 + div.2.first_pulse: OutN + div.2.inp: ZERO + div.2.inp_delay: 0 + div.2.label: Pulse divider + filter.1.enable: ZERO + filter.1.enable_delay: 0 + filter.1.inp: ZERO + filter.1.label: Filter block modes are Difference and Divider + filter.1.mode: difference + filter.1.out_capture: 'No' + filter.1.out_dataset: '' + filter.1.out_offset: 0.0 + filter.1.out_scale: 1.0 + filter.1.trig: ZERO + filter.1.trig_delay: 0 + filter.2.enable: ZERO + filter.2.enable_delay: 0 + filter.2.inp: ZERO + filter.2.label: Filter block modes are Difference and Divider + filter.2.mode: difference + filter.2.out_capture: 'No' + filter.2.out_dataset: '' + filter.2.out_offset: 0.0 + filter.2.out_scale: 1.0 + filter.2.trig: ZERO + filter.2.trig_delay: 0 + fmc_24v_in.db: None + fmc_24v_in.label: FMC 24V IO Module Input + fmc_24v_in.vtsel: 5V + fmc_24v_out.config: 0.0 + fmc_24v_out.en: Disable + fmc_24v_out.fltr: 'Off' + fmc_24v_out.label: FMC 24V IO Module Input + fmc_24v_out.pushpl: High-side + fmc_24v_out.pwr_on: 'Off' + fmc_24v_out.srial: Parallel + fmc_24v_out.val1: ZERO + fmc_24v_out.val1_delay: 0 + fmc_24v_out.val2: ZERO + fmc_24v_out.val2_delay: 0 + fmc_24v_out.val3: ZERO + fmc_24v_out.val3_delay: 0 + fmc_24v_out.val4: ZERO + fmc_24v_out.val4_delay: 0 + fmc_24v_out.val5: ZERO + fmc_24v_out.val5_delay: 0 + fmc_24v_out.val6: ZERO + fmc_24v_out.val6_delay: 0 + fmc_24v_out.val7: ZERO + fmc_24v_out.val7_delay: 0 + fmc_24v_out.val8: ZERO + fmc_24v_out.val8_delay: 0 + inenc.1.bits: 0.0 + inenc.1.clk: ZERO + inenc.1.clk_delay: 0 + inenc.1.clk_period: 0.0 + inenc.1.clk_src: Internally Generated + inenc.1.frame_period: 0.0 + inenc.1.label: Y + inenc.1.lsb_discard: 0.0 + inenc.1.msb_discard: 0.0 + inenc.1.protocol: Quadrature + inenc.1.rst_on_z: '0' + inenc.1.setp: 0 + inenc.1.val_capture: Min Max Mean + inenc.1.val_dataset: '' + inenc.1.val_offset: 0.0 + inenc.1.val_scale: 5.0e-06 + inenc.2.bits: 0.0 + inenc.2.clk: ZERO + inenc.2.clk_delay: 0 + inenc.2.clk_period: 0.0 + inenc.2.clk_src: Internally Generated + inenc.2.frame_period: 0.0 + inenc.2.label: Z + inenc.2.lsb_discard: 0.0 + inenc.2.msb_discard: 0.0 + inenc.2.protocol: Quadrature + inenc.2.rst_on_z: '0' + inenc.2.setp: 0 + inenc.2.val_capture: Min Max Mean + inenc.2.val_dataset: '' + inenc.2.val_offset: 0.0 + inenc.2.val_scale: 5.0e-06 + inenc.3.bits: 0.0 + inenc.3.clk: ZERO + inenc.3.clk_delay: 0 + inenc.3.clk_period: 0.0 + inenc.3.clk_src: 
Internally Generated + inenc.3.frame_period: 0.0 + inenc.3.label: Input encoder + inenc.3.lsb_discard: 0.0 + inenc.3.msb_discard: 0.0 + inenc.3.protocol: Quadrature + inenc.3.rst_on_z: '0' + inenc.3.setp: 0 + inenc.3.val_capture: Min Max Mean + inenc.3.val_dataset: '' + inenc.3.val_offset: 0.0 + inenc.3.val_scale: 5.0e-06 + inenc.4.bits: 0.0 + inenc.4.clk: ZERO + inenc.4.clk_delay: 0 + inenc.4.clk_period: 0.0 + inenc.4.clk_src: Internally Generated + inenc.4.frame_period: 0.0 + inenc.4.label: Input encoder + inenc.4.lsb_discard: 0.0 + inenc.4.msb_discard: 0.0 + inenc.4.protocol: Quadrature + inenc.4.rst_on_z: '0' + inenc.4.setp: 0 + inenc.4.val_capture: 'No' + inenc.4.val_dataset: '' + inenc.4.val_offset: 0.0 + inenc.4.val_scale: 1.0 + lut.1.func: A|B + lut.1.inpa: ZERO + lut.1.inpa_delay: 0 + lut.1.inpb: ZERO + lut.1.inpb_delay: 0 + lut.1.inpc: ZERO + lut.1.inpc_delay: 0 + lut.1.inpd: ZERO + lut.1.inpd_delay: 0 + lut.1.inpe: ZERO + lut.1.inpe_delay: 0 + lut.1.label: ensure pgen triggers at beginning and each iteration + lut.1.typea: Input-Level + lut.1.typeb: Input-Level + lut.1.typec: Input-Level + lut.1.typed: Input-Level + lut.1.typee: Input-Level + lut.2.func: A | B + lut.2.inpa: ZERO + lut.2.inpa_delay: 0 + lut.2.inpb: ZERO + lut.2.inpb_delay: 0 + lut.2.inpc: ZERO + lut.2.inpc_delay: 0 + lut.2.inpd: ZERO + lut.2.inpd_delay: 0 + lut.2.inpe: ZERO + lut.2.inpe_delay: 0 + lut.2.label: OR 2 + lut.2.typea: Input-Level + lut.2.typeb: Input-Level + lut.2.typec: Input-Level + lut.2.typed: Input-Level + lut.2.typee: Input-Level + lut.3.func: '0x00000000' + lut.3.inpa: ZERO + lut.3.inpa_delay: 0 + lut.3.inpb: ZERO + lut.3.inpb_delay: 0 + lut.3.inpc: ZERO + lut.3.inpc_delay: 0 + lut.3.inpd: ZERO + lut.3.inpd_delay: 0 + lut.3.inpe: ZERO + lut.3.inpe_delay: 0 + lut.3.label: AND2 + lut.3.typea: Input-Level + lut.3.typeb: Input-Level + lut.3.typec: Input-Level + lut.3.typed: Input-Level + lut.3.typee: Input-Level + lut.4.func: A | B | C + lut.4.inpa: ZERO + lut.4.inpa_delay: 0 + lut.4.inpb: ZERO + lut.4.inpb_delay: 0 + lut.4.inpc: ZERO + lut.4.inpc_delay: 0 + lut.4.inpd: ZERO + lut.4.inpd_delay: 0 + lut.4.inpe: ZERO + lut.4.inpe_delay: 0 + lut.4.label: 'OR 1 ' + lut.4.typea: Input-Level + lut.4.typeb: Input-Level + lut.4.typec: Input-Level + lut.4.typed: Input-Level + lut.4.typee: Input-Level + lut.5.func: '0x00000000' + lut.5.inpa: ZERO + lut.5.inpa_delay: 0 + lut.5.inpb: ZERO + lut.5.inpb_delay: 0 + lut.5.inpc: ZERO + lut.5.inpc_delay: 0 + lut.5.inpd: ZERO + lut.5.inpd_delay: 0 + lut.5.inpe: ZERO + lut.5.inpe_delay: 0 + lut.5.label: Lookup table + lut.5.typea: Input-Level + lut.5.typeb: Input-Level + lut.5.typec: Input-Level + lut.5.typed: Input-Level + lut.5.typee: Input-Level + lut.6.func: '0x00000000' + lut.6.inpa: ZERO + lut.6.inpa_delay: 0 + lut.6.inpb: ZERO + lut.6.inpb_delay: 0 + lut.6.inpc: ZERO + lut.6.inpc_delay: 0 + lut.6.inpd: ZERO + lut.6.inpd_delay: 0 + lut.6.inpe: ZERO + lut.6.inpe_delay: 0 + lut.6.label: Lookup table + lut.6.typea: Input-Level + lut.6.typeb: Input-Level + lut.6.typec: Input-Level + lut.6.typed: Input-Level + lut.6.typee: Input-Level + lut.7.func: '0x00000000' + lut.7.inpa: ZERO + lut.7.inpa_delay: 0 + lut.7.inpb: ZERO + lut.7.inpb_delay: 0 + lut.7.inpc: ZERO + lut.7.inpc_delay: 0 + lut.7.inpd: ZERO + lut.7.inpd_delay: 0 + lut.7.inpe: ZERO + lut.7.inpe_delay: 0 + lut.7.label: Lookup table + lut.7.typea: Input-Level + lut.7.typeb: Input-Level + lut.7.typec: Input-Level + lut.7.typed: Input-Level + lut.7.typee: Input-Level + lut.8.func: '0x00000000' + lut.8.inpa: ZERO 
+ lut.8.inpa_delay: 0 + lut.8.inpb: ZERO + lut.8.inpb_delay: 0 + lut.8.inpc: ZERO + lut.8.inpc_delay: 0 + lut.8.inpd: ZERO + lut.8.inpd_delay: 0 + lut.8.inpe: ZERO + lut.8.inpe_delay: 0 + lut.8.label: Lookup table + lut.8.typea: Input-Level + lut.8.typeb: Input-Level + lut.8.typec: Input-Level + lut.8.typed: Input-Level + lut.8.typee: Input-Level + lvdsin.1.label: LVDS input + lvdsin.2.label: LVDS input + lvdsout.1.label: LVDS output + lvdsout.1.val: ZERO + lvdsout.1.val_delay: 0 + lvdsout.2.label: LVDS output + lvdsout.2.val: ZERO + lvdsout.2.val_delay: 0 + outenc.1.a: ZERO + outenc.1.a_delay: 0 + outenc.1.b: ZERO + outenc.1.b_delay: 0 + outenc.1.bits: 0.0 + outenc.1.data: ZERO + outenc.1.data_delay: 0 + outenc.1.enable: ZERO + outenc.1.enable_delay: 0 + outenc.1.generator_error: 'No' + outenc.1.label: Output encoder + outenc.1.protocol: Quadrature + outenc.1.qperiod: 0.0 + outenc.1.val: ZERO + outenc.1.z: ZERO + outenc.1.z_delay: 0 + outenc.2.a: ZERO + outenc.2.a_delay: 0 + outenc.2.b: ZERO + outenc.2.b_delay: 0 + outenc.2.bits: 0.0 + outenc.2.data: ZERO + outenc.2.data_delay: 0 + outenc.2.enable: ZERO + outenc.2.enable_delay: 0 + outenc.2.generator_error: 'No' + outenc.2.label: Output encoder + outenc.2.protocol: Quadrature + outenc.2.qperiod: 0.0 + outenc.2.val: ZERO + outenc.2.z: ZERO + outenc.2.z_delay: 0 + outenc.3.a: ZERO + outenc.3.a_delay: 0 + outenc.3.b: ZERO + outenc.3.b_delay: 0 + outenc.3.bits: 0.0 + outenc.3.data: ZERO + outenc.3.data_delay: 0 + outenc.3.enable: ZERO + outenc.3.enable_delay: 0 + outenc.3.generator_error: 'No' + outenc.3.label: Output encoder + outenc.3.protocol: Quadrature + outenc.3.qperiod: 0.0 + outenc.3.val: ZERO + outenc.3.z: ZERO + outenc.3.z_delay: 0 + outenc.4.a: ZERO + outenc.4.a_delay: 0 + outenc.4.b: ZERO + outenc.4.b_delay: 0 + outenc.4.bits: 0.0 + outenc.4.data: ZERO + outenc.4.data_delay: 0 + outenc.4.enable: ZERO + outenc.4.enable_delay: 0 + outenc.4.generator_error: 'No' + outenc.4.label: Output encoder + outenc.4.protocol: Quadrature + outenc.4.qperiod: 0.0 + outenc.4.val: ZERO + outenc.4.z: ZERO + outenc.4.z_delay: 0 + pcap.arm: false + pcap.bits0_capture: 'No' + pcap.bits0_dataset: '' + pcap.bits1_capture: 'No' + pcap.bits1_dataset: '' + pcap.bits2_capture: 'No' + pcap.bits2_dataset: '' + pcap.bits3_capture: 'No' + pcap.bits3_dataset: '' + pcap.enable: ZERO + pcap.enable_delay: 0 + pcap.gate: PULSE1.OUT + pcap.gate_delay: 0 + pcap.label: Position capture control + pcap.samples_capture: 'No' + pcap.samples_dataset: '' + pcap.shift_sum: 0.0 + pcap.trig: PULSE1.OUT + pcap.trig_delay: 0 + pcap.trig_edge: Rising + pcap.ts_end_capture: 'No' + pcap.ts_end_dataset: '' + pcap.ts_start_capture: 'No' + pcap.ts_start_dataset: '' + pcap.ts_trig_capture: Value + pcap.ts_trig_dataset: '' + pcomp.1.dir: Positive + pcomp.1.enable: ZERO + pcomp.1.enable_delay: 0 + pcomp.1.inp: ZERO + pcomp.1.label: Position compare + pcomp.1.pre_start: 0 + pcomp.1.pulses: 0.0 + pcomp.1.relative: Absolute + pcomp.1.start: 0 + pcomp.1.step: 40 + pcomp.1.width: 30 + pcomp.2.dir: Either + pcomp.2.enable: ZERO + pcomp.2.enable_delay: 0 + pcomp.2.inp: ZERO + pcomp.2.label: Position compare + pcomp.2.pre_start: 0 + pcomp.2.pulses: 0.0 + pcomp.2.relative: Absolute + pcomp.2.start: 400 + pcomp.2.step: 0 + pcomp.2.width: 50 + pgen.1.enable: ZERO + pgen.1.enable_delay: 0 + pgen.1.label: Position generator + pgen.1.out_capture: 'No' + pgen.1.out_dataset: '' + pgen.1.out_offset: 0.0 + pgen.1.out_scale: 1.0 + pgen.1.repeats: 0.0 + pgen.1.table: + position: [] + pgen.1.trig: ZERO + 
pgen.1.trig_delay: 0 + pgen.2.enable: ZERO + pgen.2.enable_delay: 0 + pgen.2.label: Position generator + pgen.2.out_capture: 'No' + pgen.2.out_dataset: '' + pgen.2.out_offset: 0.0 + pgen.2.out_scale: 1.0 + pgen.2.repeats: 0.0 + pgen.2.table: + position: [] + pgen.2.trig: ZERO + pgen.2.trig_delay: 0 + pulse.1.delay: 0.0 + pulse.1.enable: ZERO + pulse.1.enable_delay: 0 + pulse.1.label: Begin FGS on trig + pulse.1.pulses: 1.0 + pulse.1.step: 0.0 + pulse.1.trig: SEQ1.OUTA + pulse.1.trig_delay: 0 + pulse.1.trig_edge: Rising + pulse.1.width: 0.0001 + pulse.2.delay: 0.0 + pulse.2.enable: ZERO + pulse.2.enable_delay: 0 + pulse.2.label: One-shot pulse delay and stretch + pulse.2.pulses: 0.0 + pulse.2.step: 0.0 + pulse.2.trig: ZERO + pulse.2.trig_delay: 0 + pulse.2.trig_edge: Rising + pulse.2.width: 0.0 + pulse.3.delay: 0.0 + pulse.3.enable: ZERO + pulse.3.enable_delay: 0 + pulse.3.label: One-shot pulse delay and stretch + pulse.3.pulses: 0.0 + pulse.3.step: 0.0 + pulse.3.trig: ZERO + pulse.3.trig_delay: 0 + pulse.3.trig_edge: Rising + pulse.3.width: 0.0 + pulse.4.delay: 0.0 + pulse.4.enable: ZERO + pulse.4.enable_delay: 0 + pulse.4.label: One-shot pulse delay and stretch + pulse.4.pulses: 0.0 + pulse.4.step: 0.0 + pulse.4.trig: ZERO + pulse.4.trig_delay: 0 + pulse.4.trig_edge: Rising + pulse.4.width: 0.0 + seq.1.bita: TTLIN1.VAL + seq.1.bita_delay: 0 + seq.1.bitb: ZERO + seq.1.bitb_delay: 0 + seq.1.bitc: ZERO + seq.1.bitc_delay: 0 + seq.1.enable: ZERO + seq.1.enable_delay: 0 + seq.1.label: Sequencer + seq.1.posa: INENC1.VAL + seq.1.posb: ZERO + seq.1.posc: ZERO + seq.1.prescale: 1.0 + seq.1.repeats: 0.0 + seq.1.table: + outa1: [0, 1, 0, 0, 1, 0] + outa2: [0, 1, 0, 0, 1, 0] + outb1: [0, 0, 0, 0, 0, 0] + outb2: [0, 0, 0, 0, 0, 0] + outc1: [0, 0, 0, 0, 0, 0] + outc2: [0, 0, 0, 0, 0, 0] + outd1: [0, 0, 0, 0, 0, 0] + outd2: [0, 0, 0, 0, 0, 0] + oute1: [0, 0, 0, 0, 0, 0] + oute2: [0, 0, 0, 0, 0, 0] + outf1: [0, 0, 0, 0, 0, 0] + outf2: [0, 0, 0, 0, 0, 0] + position: [0, 68581, 186581, 0, 188579, 70579] + repeats: [1, 1, 1, 1, 1, 1] + time1: [0, 0, 0, 0, 0, 0] + time2: [1, 1, 1, 1, 1, 1] + trigger: + - BITA=1 + - POSA>=POSITION + - POSA>=POSITION + - BITA=1 + - POSA<=POSITION + - POSA<=POSITION + seq.2.bita: ZERO + seq.2.bita_delay: 0 + seq.2.bitb: ZERO + seq.2.bitb_delay: 0 + seq.2.bitc: ZERO + seq.2.bitc_delay: 0 + seq.2.enable: ZERO + seq.2.enable_delay: 0 + seq.2.label: PCOMP from table + seq.2.posa: ZERO + seq.2.posb: ZERO + seq.2.posc: ZERO + seq.2.prescale: 0.0 + seq.2.repeats: 0.0 + seq.2.table: + outa1: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0] + outa2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0] + outb1: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + outb2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + outc1: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + outc2: [0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + outd1: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + outd2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + oute1: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + oute2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + outf1: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + outf2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + position: [-1985, 2000, 6000, 10000, 14000, 18000, 22000, 26000, 30000, 34000, + 38000, 42000, 46000, 50000, 54000, 58000, 62000, 66000, 70000, 74000, 78000, + 82000, 86000, 90000, 94000, 98000, 102000, 106000, 110000, 114000, 117990, 117985, + 114000, 110000, 106000, 102000, 98000, 94000, 90000, 86000, 82000, 78000, 74000, + 70000, 66000, 62000, 58000, 54000, 50000, 46000, 42000, 38000, 34000, 30000, + 26000, 22000, 18000, 14000, 10000, 6000, 2000, -1990] + repeats: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + time1: [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + time2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0] + trigger: + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA>=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - 
POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + - POSA<=POSITION + sfp3_sync_in.label: sfp panda synchronizer + sfp3_sync_in.pos1_capture: 'No' + sfp3_sync_in.pos1_dataset: '' + sfp3_sync_in.pos1_offset: 0.0 + sfp3_sync_in.pos1_scale: 1.0 + sfp3_sync_in.pos2_capture: 'No' + sfp3_sync_in.pos2_dataset: '' + sfp3_sync_in.pos2_offset: 0.0 + sfp3_sync_in.pos2_scale: 1.0 + sfp3_sync_in.pos3_capture: 'No' + sfp3_sync_in.pos3_dataset: '' + sfp3_sync_in.pos3_offset: 0.0 + sfp3_sync_in.pos3_scale: 1.0 + sfp3_sync_in.pos4_capture: 'No' + sfp3_sync_in.pos4_dataset: '' + sfp3_sync_in.pos4_offset: 0.0 + sfp3_sync_in.pos4_scale: 1.0 + sfp3_sync_out.bit1: ZERO + sfp3_sync_out.bit10: ZERO + sfp3_sync_out.bit10_delay: 0 + sfp3_sync_out.bit11: ZERO + sfp3_sync_out.bit11_delay: 0 + sfp3_sync_out.bit12: ZERO + sfp3_sync_out.bit12_delay: 0 + sfp3_sync_out.bit13: ZERO + sfp3_sync_out.bit13_delay: 0 + sfp3_sync_out.bit14: ZERO + sfp3_sync_out.bit14_delay: 0 + sfp3_sync_out.bit15: ZERO + sfp3_sync_out.bit15_delay: 0 + sfp3_sync_out.bit16: ZERO + sfp3_sync_out.bit16_delay: 0 + sfp3_sync_out.bit1_delay: 0 + sfp3_sync_out.bit2: ZERO + sfp3_sync_out.bit2_delay: 0 + sfp3_sync_out.bit3: ZERO + sfp3_sync_out.bit3_delay: 0 + sfp3_sync_out.bit4: ZERO + sfp3_sync_out.bit4_delay: 0 + sfp3_sync_out.bit5: ZERO + sfp3_sync_out.bit5_delay: 0 + sfp3_sync_out.bit6: ZERO + sfp3_sync_out.bit6_delay: 0 + sfp3_sync_out.bit7: ZERO + sfp3_sync_out.bit7_delay: 0 + sfp3_sync_out.bit8: ZERO + sfp3_sync_out.bit8_delay: 0 + sfp3_sync_out.bit9: ZERO + sfp3_sync_out.bit9_delay: 0 + sfp3_sync_out.label: sfp panda synchronizer + sfp3_sync_out.pos1: ZERO + sfp3_sync_out.pos2: ZERO + sfp3_sync_out.pos3: ZERO + sfp3_sync_out.pos4: ZERO + srgate.1.enable: ZERO + srgate.1.enable_delay: 0 + srgate.1.label: Reset on new row + srgate.1.rst: ZERO + srgate.1.rst_delay: 0 + srgate.1.rst_edge: Rising + srgate.1.set: ZERO + srgate.1.set_delay: 0 + srgate.1.set_edge: Rising + srgate.1.when_disabled: Set output low + srgate.2.enable: ZERO + srgate.2.enable_delay: 0 + srgate.2.label: Set reset gate + srgate.2.rst: ZERO + srgate.2.rst_delay: 0 + srgate.2.rst_edge: Rising + srgate.2.set: ZERO + srgate.2.set_delay: 0 + srgate.2.set_edge: Rising + srgate.2.when_disabled: Set output low + srgate.3.enable: ZERO + srgate.3.enable_delay: 0 + srgate.3.label: Set reset gate + srgate.3.rst: ZERO + srgate.3.rst_delay: 0 + srgate.3.rst_edge: Rising + srgate.3.set: ZERO + srgate.3.set_delay: 0 + srgate.3.set_edge: Rising + srgate.3.when_disabled: Set output low + srgate.4.enable: ZERO + srgate.4.enable_delay: 0 + srgate.4.label: Set reset gate + srgate.4.rst: ZERO + srgate.4.rst_delay: 0 + srgate.4.rst_edge: Rising + srgate.4.set: ZERO + srgate.4.set_delay: 0 + srgate.4.set_edge: Rising + srgate.4.when_disabled: Set output low + system.ext_clock: int clock + system.label: System control FPGA + ttlin.1.label: TTL input + ttlin.1.term: High-Z + ttlin.2.label: TTL input + ttlin.2.term: High-Z + ttlin.3.label: TTL input + ttlin.3.term: High-Z + ttlin.4.label: TTL input + ttlin.4.term: High-Z + ttlin.5.label: TTL input + ttlin.5.term: High-Z + ttlin.6.label: TTL input + ttlin.6.term: High-Z + ttlout.1.label: TTL output + ttlout.1.val: PULSE1.OUT + ttlout.1.val_delay: 0 + ttlout.10.label: TTL output + ttlout.10.val: ZERO + ttlout.10.val_delay: 0 + ttlout.2.label: TTL output + ttlout.2.val: ZERO + ttlout.2.val_delay: 0 + ttlout.3.label: TTL output + ttlout.3.val: ZERO + ttlout.3.val_delay: 0 + ttlout.4.label: TTL output + ttlout.4.val: ZERO + 
ttlout.4.val_delay: 0 + ttlout.5.label: TTL output + ttlout.5.val: ZERO + ttlout.5.val_delay: 0 + ttlout.6.label: TTL output + ttlout.6.val: ZERO + ttlout.6.val_delay: 0 + ttlout.7.label: TTL output + ttlout.7.val: ZERO + ttlout.7.val_delay: 0 + ttlout.8.label: TTL output + ttlout.8.val: ZERO + ttlout.8.val_delay: 0 + ttlout.9.label: TTL output + ttlout.9.val: ZERO + ttlout.9.val_delay: 0 diff --git a/src/mx_bluesky/hyperion/tracing.py b/src/mx_bluesky/hyperion/tracing.py new file mode 100644 index 000000000..4dd5fbc1d --- /dev/null +++ b/src/mx_bluesky/hyperion/tracing.py @@ -0,0 +1,28 @@ +from opentelemetry import metrics, trace +from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.metrics import MeterProvider +from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader +from opentelemetry.sdk.resources import SERVICE_NAME, Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor + + +def setup_tracing(): + resource = Resource(attributes={SERVICE_NAME: "Hyperion"}) + + traceProvider = TracerProvider(resource=resource) + processor = BatchSpanProcessor( + OTLPSpanExporter(endpoint="http://0.0.0.0:4318/v1/traces") + ) + traceProvider.add_span_processor(processor) + trace.set_tracer_provider(traceProvider) + + reader = PeriodicExportingMetricReader( + OTLPMetricExporter(endpoint="http://0.0.0.0:4318/v1/metrics") + ) + meterProvider = MeterProvider(resource=resource, metric_readers=[reader]) + metrics.set_meter_provider(meterProvider) + + +TRACER = trace.get_tracer(__name__) diff --git a/src/mx_bluesky/hyperion/utils/context.py b/src/mx_bluesky/hyperion/utils/context.py new file mode 100644 index 000000000..f67c4a1b6 --- /dev/null +++ b/src/mx_bluesky/hyperion/utils/context.py @@ -0,0 +1,84 @@ +import dataclasses +from typing import Any, ClassVar, Protocol, TypeVar, get_type_hints + +from blueapi.core import BlueskyContext +from blueapi.core.bluesky_types import Device +from dodal.utils import get_beamline_based_on_environment_variable + +import mx_bluesky.hyperion.experiment_plans as hyperion_plans +from mx_bluesky.hyperion.log import LOGGER + +T = TypeVar("T", bound=Device) + + +class _IsDataclass(Protocol): + """Protocol followed by any dataclass""" + + __dataclass_fields__: ClassVar[dict] + + +DT = TypeVar("DT", bound=_IsDataclass) + + +def find_device_in_context( + context: BlueskyContext, + name: str, + # Typing in here is wrong (see https://github.com/microsoft/pyright/issues/7228#issuecomment-1934500232) + # but this whole thing will go away when we do https://github.com/DiamondLightSource/hyperion/issues/868 + expected_type: type[T] = Device, # type: ignore +) -> T: + LOGGER.debug(f"Looking for device {name} of type {expected_type} in context") + + device = context.find_device(name) + if device is None: + raise ValueError( + f"Cannot find device named '{name}' in bluesky context {context.devices}." 
+ ) + + if not isinstance(device, expected_type): + raise ValueError( + f"Found device named '{name}' and expected it to be a '{expected_type}' but it was a '{device.__class__.__name__}'" + ) + + LOGGER.debug(f"Found matching device {device}") + return device + + +def device_composite_from_context(context: BlueskyContext, dc: type[DT]) -> DT: + """ + Initializes all of the devices referenced in a given dataclass from a provided + context, checking that the types of devices returned by the context are compatible + with the type annotations of the dataclass. + + Note that if the context was not created with `wait_for_connection=True` devices may + still be unconnected. + """ + LOGGER.debug( + f"Attempting to initialize devices referenced in dataclass {dc} from blueapi context" + ) + + devices: dict[str, Any] = {} + dc_type_hints: dict[str, Any] = get_type_hints(dc) + + for field in dataclasses.fields(dc): + device = find_device_in_context( + context, field.name, expected_type=dc_type_hints.get(field.name, Device) + ) + + devices[field.name] = device + + return dc(**devices) + + +def setup_context(wait_for_connection: bool = True) -> BlueskyContext: + context = BlueskyContext() + context.with_plan_module(hyperion_plans) + + context.with_dodal_module( + get_beamline_based_on_environment_variable(), + wait_for_connection=wait_for_connection, + ) + + LOGGER.info(f"Plans found in context: {context.plan_functions.keys()}") + + return context diff --git a/src/mx_bluesky/hyperion/utils/utils.py b/src/mx_bluesky/hyperion/utils/utils.py new file mode 100644 index 000000000..6fd4b1332 --- /dev/null +++ b/src/mx_bluesky/hyperion/utils/utils.py @@ -0,0 +1,25 @@ +from scanspec.core import AxesPoints, Axis +from scipy.constants import physical_constants + +hc_in_eV_and_Angstrom: float = ( + physical_constants["speed of light in vacuum"][0] + * physical_constants["Planck constant in eV/Hz"][0] + * 1e10 # Angstroms per metre +) + + +def interconvert_eV_Angstrom(wavelength_or_energy: float) -> float: + return hc_in_eV_and_Angstrom / wavelength_or_energy + + +def convert_eV_to_angstrom(hv: float) -> float: + return interconvert_eV_Angstrom(hv) + + +def convert_angstrom_to_eV(wavelength: float) -> float: + return interconvert_eV_Angstrom(wavelength) + + +def number_of_frames_from_scan_spec(scan_points: AxesPoints[Axis]): + ax = list(scan_points.keys())[0] + return len(scan_points[ax]) diff --git a/src/mx_bluesky/hyperion/utils/validation.py b/src/mx_bluesky/hyperion/utils/validation.py new file mode 100644 index 000000000..b8171fadf --- /dev/null +++ b/src/mx_bluesky/hyperion/utils/validation.py @@ -0,0 +1,193 @@ +import gzip +import json +import os +import shutil +from pathlib import Path +from unittest.mock import patch + +import bluesky.preprocessors as bpp +from bluesky.run_engine import RunEngine +from dodal.beamlines import i03 +from dodal.devices.oav.oav_parameters import OAVConfigParams +from ophyd_async.core import set_mock_value + +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationScanComposite, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback import ( + RotationNexusFileCallback, +) +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import RotationScan + +DISPLAY_CONFIGURATION = "tests/devices/unit_tests/test_display.configuration" +ZOOM_LEVELS_XML = 
"tests/devices/unit_tests/test_jCameraManZoomLevels.xml" +TEST_DATA_DIRECTORY = Path("tests/test_data/nexus_files/rotation") +TEST_METAFILE = "ins_8_5_meta.h5.gz" +FAKE_DATAFILE = "../fake_data.h5" +FILENAME_STUB = "test_rotation_nexus" + + +def test_params(filename_stub, dir): + def get_params(filename): + with open(filename) as f: + return json.loads(f.read()) + + params = RotationScan( + **get_params( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + ) + params.file_name = filename_stub + params.scan_width_deg = 360 + params.demand_energy_ev = 12700 + params.storage_directory = str(dir) + params.x_start_um = 0 + params.y_start_um = 0 + params.z_start_um = 0 + params.exposure_time_s = 0.004 + return params + + +def fake_rotation_scan( + parameters: RotationScan, + subscription: RotationNexusFileCallback, + rotation_devices: RotationScanComposite, +): + @bpp.subs_decorator(subscription) + @bpp.set_run_key_decorator("rotation_scan_with_cleanup_and_subs") + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.ROTATION_OUTER, + "hyperion_parameters": parameters.json(), + "activate_callbacks": "RotationNexusFileCallback", + } + ) + def plan(): + yield from read_hardware_during_collection( + rotation_devices.aperture_scatterguard, + rotation_devices.attenuator, + rotation_devices.flux, + rotation_devices.dcm, + rotation_devices.eiger, + ) + + return plan() + + +def fake_create_rotation_devices(): + eiger = i03.eiger(fake_with_ophyd_sim=True) + smargon = i03.smargon(fake_with_ophyd_sim=True) + zebra = i03.zebra(fake_with_ophyd_sim=True) + detector_motion = i03.detector_motion(fake_with_ophyd_sim=True) + backlight = i03.backlight(fake_with_ophyd_sim=True) + attenuator = i03.attenuator(fake_with_ophyd_sim=True) + flux = i03.flux(fake_with_ophyd_sim=True) + undulator = i03.undulator(fake_with_ophyd_sim=True) + aperture_scatterguard = i03.aperture_scatterguard(fake_with_ophyd_sim=True) + synchrotron = i03.synchrotron(fake_with_ophyd_sim=True) + s4_slit_gaps = i03.s4_slit_gaps(fake_with_ophyd_sim=True) + dcm = i03.dcm(fake_with_ophyd_sim=True) + robot = i03.robot(fake_with_ophyd_sim=True) + oav = i03.oav( + fake_with_ophyd_sim=True, + params=OAVConfigParams( + zoom_params_file=ZOOM_LEVELS_XML, display_config=DISPLAY_CONFIGURATION + ), + ) + + set_mock_value(smargon.omega.max_velocity, 131) + set_mock_value(dcm.energy_in_kev.user_readback, 12700) + oav.zoom_controller.fvst.sim_put("1.0x") # type: ignore + + return RotationScanComposite( + attenuator=attenuator, + backlight=backlight, + dcm=dcm, + detector_motion=detector_motion, + eiger=eiger, + flux=flux, + smargon=smargon, + undulator=undulator, + aperture_scatterguard=aperture_scatterguard, + synchrotron=synchrotron, + s4_slit_gaps=s4_slit_gaps, + zebra=zebra, + robot=robot, + oav=oav, + ) + + +def sim_rotation_scan_to_create_nexus( + test_params: RotationScan, + fake_create_rotation_devices: RotationScanComposite, + filename_stub, + RE, +): + run_number = test_params.detector_params.run_number + nexus_filename = f"{filename_stub}_{run_number}.nxs" + + fake_create_rotation_devices.eiger.bit_depth.sim_put(32) # type: ignore + + with patch( + "mx_bluesky.hyperion.external_interaction.nexus.write_nexus.get_start_and_predicted_end_time", + return_value=("test_time", "test_time"), + ): + RE( + fake_rotation_scan( + test_params, RotationNexusFileCallback(), fake_create_rotation_devices + ) + ) + + nexus_path = Path(test_params.storage_directory) / nexus_filename + 
assert os.path.isfile(nexus_path) + return filename_stub, run_number + + +def extract_metafile(input_filename, output_filename): + with gzip.open(input_filename) as metafile_fo: + with open(output_filename, "wb") as output_fo: + output_fo.write(metafile_fo.read()) + + +def _generate_fake_nexus(filename, dir=os.getcwd()): + RE = RunEngine({}) + params = test_params(filename, dir) + run_number = params.detector_params.run_number + filename_stub, run_number = sim_rotation_scan_to_create_nexus( + params, fake_create_rotation_devices(), filename, RE + ) + return filename_stub, run_number + + +def generate_test_nexus(): + filename_stub, run_number = _generate_fake_nexus(FILENAME_STUB) + # ugly hack because we get double free error on exit + with open("OUTPUT_FILENAME", "x") as f: + f.write(f"{filename_stub}_{run_number}.nxs") + + extract_metafile( + str(TEST_DATA_DIRECTORY / TEST_METAFILE), + f"{FILENAME_STUB}_{run_number}_meta.h5", + ) + + new_hyp_data = [f"{FILENAME_STUB}_{run_number}_00000{n}.h5" for n in [1, 2, 3, 4]] + [shutil.copy(TEST_DATA_DIRECTORY / FAKE_DATAFILE, d) for d in new_hyp_data] + + exit(0) + + +def copy_test_meta_data_files(): + extract_metafile( + str(TEST_DATA_DIRECTORY / TEST_METAFILE), + f"{TEST_DATA_DIRECTORY}/ins_8_5_meta.h5", + ) + new_data = [f"{TEST_DATA_DIRECTORY}/ins_8_5_00000{n}.h5" for n in [1, 2, 3, 4]] + [shutil.copy(TEST_DATA_DIRECTORY / FAKE_DATAFILE, d) for d in new_data] + + +if __name__ == "__main__": + generate_test_nexus() diff --git a/src/mx_bluesky/i04/__init__.py b/src/mx_bluesky/i04/__init__.py deleted file mode 100644 index 1de75fce0..000000000 --- a/src/mx_bluesky/i04/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from mx_bluesky.i04.thawing_plan import thaw, thaw_and_center - -__all__ = ["thaw", "thaw_and_center"] diff --git a/src/mx_bluesky/i24/serial/parameters/__init__.py b/src/mx_bluesky/i24/serial/parameters/__init__.py deleted file mode 100644 index 184d36ff5..000000000 --- a/src/mx_bluesky/i24/serial/parameters/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from mx_bluesky.i24.serial.parameters.constants import SSXType -from mx_bluesky.i24.serial.parameters.experiment_parameters import ( - ChipDescription, - ExtruderParameters, - FixedTargetParameters, -) -from mx_bluesky.i24.serial.parameters.utils import get_chip_format - -__all__ = [ - "SSXType", - "ExtruderParameters", - "ChipDescription", - "FixedTargetParameters", - "get_chip_format", -] diff --git a/src/mx_bluesky/jupyter_example.ipynb b/src/mx_bluesky/jupyter_example.ipynb index cfa9681ae..43bf10453 100644 --- a/src/mx_bluesky/jupyter_example.ipynb +++ b/src/mx_bluesky/jupyter_example.ipynb @@ -41,9 +41,10 @@ "metadata": {}, "outputs": [], "source": [ - "from ophyd.sim import det, motor\n", "from bluesky.plans import scan\n", - "dets = [det] # just one in this case, but it could be more than one\n", + "from ophyd.sim import det, motor\n", + "\n", + "dets = [det] # just one in this case, but it could be more than one\n", "\n", "RE(scan(dets, motor, -1, 1, 10))" ] diff --git a/tests/conftest.py b/tests/conftest.py index cee8cdfa7..e1d065cc0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,31 +1,864 @@ import asyncio -import os -import time +import gzip +import json +import logging +import sys +import threading +from collections.abc import Generator, Sequence +from functools import partial +from typing import Any +from unittest.mock import MagicMock, patch +import bluesky.plan_stubs as bps +import numpy as np import pytest from bluesky.run_engine import RunEngine +from 
bluesky.simulators import RunEngineSimulator +from bluesky.utils import Msg +from dodal.beamlines import i03 +from dodal.common.beamlines import beamline_utils +from dodal.common.beamlines.beamline_parameters import ( + GDABeamlineParameters, +) +from dodal.common.beamlines.beamline_utils import clear_devices +from dodal.devices.aperturescatterguard import ( + ApertureFiveDimensionalLocation, + AperturePosition, + ApertureScatterguard, + ApertureScatterguardTolerances, + SingleAperturePosition, +) +from dodal.devices.attenuator import Attenuator +from dodal.devices.backlight import Backlight +from dodal.devices.dcm import DCM +from dodal.devices.detector.detector_motion import DetectorMotion +from dodal.devices.eiger import EigerDetector +from dodal.devices.fast_grid_scan import FastGridScanCommon +from dodal.devices.flux import Flux +from dodal.devices.oav.oav_detector import OAV, OAVConfigParams +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.robot import BartRobot +from dodal.devices.s4_slit_gaps import S4SlitGaps +from dodal.devices.smargon import Smargon +from dodal.devices.synchrotron import Synchrotron, SynchrotronMode +from dodal.devices.thawer import Thawer +from dodal.devices.undulator import Undulator +from dodal.devices.util.test_utils import patch_motor as oa_patch_motor +from dodal.devices.webcam import Webcam +from dodal.devices.zebra import Zebra +from dodal.log import LOGGER as dodal_logger +from dodal.log import set_up_all_logging_handlers +from ophyd.sim import NullStatus +from ophyd_async.core import Device, DeviceVector, callback_on_mock_put, set_mock_value +from ophyd_async.core.async_status import AsyncStatus +from ophyd_async.epics.motion.motor import Motor +from ophyd_async.epics.signal import epics_signal_rw +from ophyd_async.panda._common_blocks import DatasetTable +from scanspec.core import Path as ScanPath +from scanspec.specs import Line -# Prevent pytest from catching exceptions when debugging in vscode so that break on -# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) -if os.getenv("PYTEST_RAISE", "0") == "1": +from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import ( + FlyScanXRayCentreComposite, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationScanComposite, +) +from mx_bluesky.hyperion.external_interaction.callbacks.logging_callback import ( + VerbosePlanExecutionLoggingCallback, +) +from mx_bluesky.hyperion.external_interaction.config_server import FeatureFlags +from mx_bluesky.hyperion.log import ( + ALL_LOGGERS, + ISPYB_LOGGER, + LOGGER, + NEXUS_LOGGER, + _get_logging_dir, + do_default_logging_setup, +) +from mx_bluesky.hyperion.parameters.gridscan import ( + GridScanWithEdgeDetect, + ThreeDGridScan, +) +from mx_bluesky.hyperion.parameters.rotation import MultiRotationScan, RotationScan - @pytest.hookimpl(tryfirst=True) - def pytest_exception_interact(call): - raise call.excinfo.value +i03.DAQ_CONFIGURATION_PATH = "tests/test_data/test_daq_configuration" - @pytest.hookimpl(tryfirst=True) - def pytest_internalerror(excinfo): - raise excinfo.value + +def raw_params_from_file(filename): + with open(filename) as f: + return json.loads(f.read()) + + +def default_raw_params(): + return raw_params_from_file( + "tests/test_data/parameter_json_files/test_gridscan_param_defaults.json" + ) + + +def create_dummy_scan_spec(x_steps, y_steps, z_steps): + x_line = Line("sam_x", 0, 10, 10) + y_line = Line("sam_y", 10, 20, 20) + z_line = Line("sam_z", 
30, 50, 30) + + specs = [y_line * ~x_line, z_line * ~x_line] + specs = [ScanPath(spec.calculate()) for spec in specs] + return [spec.consume().midpoints for spec in specs] + + +def _reset_loggers(loggers): + """Clear all handlers and tear down the logging hierarchy, leave logger references intact.""" + clear_log_handlers(loggers) + for logger in loggers: + if logger.name != "Hyperion": + # Hyperion parent is configured on module import, do not remove + logger.parent = logging.getLogger() + + +def clear_log_handlers(loggers: Sequence[logging.Logger]): + for logger in loggers: + for handler in logger.handlers: + handler.close() + logger.handlers.clear() + + +def pytest_runtest_setup(item): + markers = [m.name for m in item.own_markers] + if item.config.getoption("logging") and "skip_log_setup" not in markers: + if LOGGER.handlers == []: + if dodal_logger.handlers == []: + print("Initialising Hyperion logger for tests") + do_default_logging_setup(dev_mode=True) + if ISPYB_LOGGER.handlers == []: + print("Initialising ISPyB logger for tests") + set_up_all_logging_handlers( + ISPYB_LOGGER, + _get_logging_dir(), + "hyperion_ispyb_callback.log", + True, + 10000, + ) + if NEXUS_LOGGER.handlers == []: + print("Initialising nexus logger for tests") + set_up_all_logging_handlers( + NEXUS_LOGGER, + _get_logging_dir(), + "hyperion_ispyb_callback.log", + True, + 10000, + ) + else: + print("Skipping log setup for log test - deleting existing handlers") + _reset_loggers([*ALL_LOGGERS, dodal_logger]) + + +def pytest_runtest_teardown(item): + if "dodal.common.beamlines.beamline_utils" in sys.modules: + sys.modules["dodal.common.beamlines.beamline_utils"].clear_devices() + markers = [m.name for m in item.own_markers] + if "skip_log_setup" in markers: + _reset_loggers([*ALL_LOGGERS, dodal_logger]) @pytest.fixture -async def RE(): - RE = RunEngine(call_returns_result=True) - # make sure the event loop is thoroughly up and running before we try to create - # any ophyd_async devices which might need it - timeout = time.monotonic() + 1 - while not RE.loop.is_running(): - await asyncio.sleep(0) - if time.monotonic() > timeout: - raise TimeoutError("This really shouldn't happen but just in case...") +def RE(): + RE = RunEngine({}, call_returns_result=True) + RE.subscribe( + VerbosePlanExecutionLoggingCallback() + ) # log all events at INFO for easier debugging yield RE + try: + RE.halt() + except Exception as e: + print(f"Got exception while halting RunEngine {e}") + finally: + stopped_event = threading.Event() + + def stop_event_loop(): + RE.loop.stop() # noqa: F821 + stopped_event.set() + + RE.loop.call_soon_threadsafe(stop_event_loop) + stopped_event.wait(10) + del RE + + +def pass_on_mock(motor, call_log: MagicMock | None = None): + def _pass_on_mock(value, **kwargs): + set_mock_value(motor.user_readback, value) + if call_log is not None: + call_log(value, **kwargs) + + return _pass_on_mock + + +def patch_async_motor( + motor: Motor, initial_position=0, call_log: MagicMock | None = None +): + set_mock_value(motor.user_setpoint, initial_position) + set_mock_value(motor.user_readback, initial_position) + set_mock_value(motor.deadband, 0.001) + set_mock_value(motor.motor_done_move, 1) + set_mock_value(motor.velocity, 1) + return callback_on_mock_put(motor.user_setpoint, pass_on_mock(motor, call_log)) + + +@pytest.fixture +def beamline_parameters(): + return GDABeamlineParameters.from_file( + "tests/test_data/test_beamline_parameters.txt" + ) + + +@pytest.fixture +def test_fgs_params(): + return ThreeDGridScan( + 
**raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_parameters.json" + ) + ) + + +@pytest.fixture +def test_panda_fgs_params(test_fgs_params: ThreeDGridScan): + test_fgs_params.use_panda = True + return test_fgs_params + + +@pytest.fixture +def test_rotation_params(): + return RotationScan( + **raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + ) + + +@pytest.fixture +def test_rotation_params_nomove(): + return RotationScan( + **raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters_nomove.json" + ) + ) + + +@pytest.fixture +def test_multi_rotation_params(): + return MultiRotationScan( + **raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_multi_rotation_scan_parameters.json" + ) + ) + + +@pytest.fixture +def done_status(): + return NullStatus() + + +@pytest.fixture +def eiger(done_status): + eiger = i03.eiger(fake_with_ophyd_sim=True) + eiger.stage = MagicMock(return_value=done_status) + eiger.do_arm.set = MagicMock(return_value=done_status) + eiger.unstage = MagicMock(return_value=done_status) + return eiger + + +@pytest.fixture +def smargon(RE: RunEngine) -> Generator[Smargon, None, None]: + smargon = i03.smargon(fake_with_ophyd_sim=True) + # Initial positions, needed for stub_offsets + set_mock_value(smargon.stub_offsets.center_at_current_position.disp, 0) + set_mock_value(smargon.x.user_readback, 0.0) + set_mock_value(smargon.y.user_readback, 0.0) + set_mock_value(smargon.z.user_readback, 0.0) + set_mock_value(smargon.x.high_limit_travel, 2) + set_mock_value(smargon.x.low_limit_travel, -2) + + with ( + patch_async_motor(smargon.omega), + patch_async_motor(smargon.x), + patch_async_motor(smargon.y), + patch_async_motor(smargon.z), + patch_async_motor(smargon.chi), + patch_async_motor(smargon.phi), + ): + yield smargon + clear_devices() + + +@pytest.fixture +def zebra(): + RunEngine() + zebra = i03.zebra(fake_with_ophyd_sim=True) + + def mock_side(*args, **kwargs): + set_mock_value(zebra.pc.arm.armed, *args, **kwargs) + return NullStatus() + + zebra.pc.arm.set = MagicMock(side_effect=mock_side) + return zebra + + +@pytest.fixture +def backlight(): + return i03.backlight(fake_with_ophyd_sim=True) + + +@pytest.fixture +def fast_grid_scan(): + return i03.zebra_fast_grid_scan(fake_with_ophyd_sim=True) + + +@pytest.fixture +def detector_motion(RE): + det = i03.detector_motion(fake_with_ophyd_sim=True) + with patch_async_motor(det.z): + yield det + + +@pytest.fixture +def undulator(): + return i03.undulator(fake_with_ophyd_sim=True) + + +@pytest.fixture +def s4_slit_gaps(): + return i03.s4_slit_gaps(fake_with_ophyd_sim=True) + + +@pytest.fixture +def synchrotron(RE): + synchrotron = i03.synchrotron(fake_with_ophyd_sim=True) + set_mock_value(synchrotron.synchrotron_mode, SynchrotronMode.USER) + set_mock_value(synchrotron.top_up_start_countdown, 10) + return synchrotron + + +@pytest.fixture +def oav(test_config_files): + parameters = OAVConfigParams( + test_config_files["zoom_params_file"], test_config_files["display_config"] + ) + oav = i03.oav(fake_with_ophyd_sim=True, params=parameters) + oav.snapshot.trigger = MagicMock(return_value=NullStatus()) + return oav + + +@pytest.fixture +def flux(): + return i03.flux(fake_with_ophyd_sim=True) + + +@pytest.fixture +def pin_tip(): + return i03.pin_tip_detection(fake_with_ophyd_sim=True) + + +@pytest.fixture +def ophyd_pin_tip_detection(): + RunEngine() # A RE is needed to start the bluesky loop + 
pin_tip_detection = i03.pin_tip_detection(fake_with_ophyd_sim=True) + return pin_tip_detection + + +@pytest.fixture +def robot(done_status): + RunEngine() # A RE is needed to start the bluesky loop + robot = i03.robot(fake_with_ophyd_sim=True) + set_mock_value(robot.barcode, "BARCODE") + robot.set = MagicMock(return_value=done_status) + return robot + + +@pytest.fixture +def attenuator(RE): + attenuator = i03.attenuator(fake_with_ophyd_sim=True) + set_mock_value(attenuator.actual_transmission, 0.49118047952) + + @AsyncStatus.wrap + async def fake_attenuator_set(val): + set_mock_value(attenuator.actual_transmission, val) + + attenuator.set = MagicMock(side_effect=fake_attenuator_set) + + yield attenuator + + +@pytest.fixture +def xbpm_feedback(done_status): + xbpm = i03.xbpm_feedback(fake_with_ophyd_sim=True) + xbpm.trigger = MagicMock(return_value=done_status) # type: ignore + yield xbpm + beamline_utils.clear_devices() + + +@pytest.fixture +def dcm(RE): + dcm = i03.dcm(fake_with_ophyd_sim=True) + set_mock_value(dcm.energy_in_kev.user_readback, 12.7) + set_mock_value(dcm.pitch_in_mrad.user_readback, 1) + return dcm + + +@pytest.fixture +def vfm(RE): + vfm = i03.vfm(fake_with_ophyd_sim=True) + vfm.bragg_to_lat_lookup_table_path = ( + "tests/test_data/test_beamline_vfm_lat_converter.txt" + ) + return vfm + + +@pytest.fixture +def lower_gonio(RE): + lower_gonio = i03.lower_gonio(fake_with_ophyd_sim=True) + with ( + oa_patch_motor(lower_gonio.x), + oa_patch_motor(lower_gonio.y), + oa_patch_motor(lower_gonio.z), + ): + yield lower_gonio + + +@pytest.fixture +def vfm_mirror_voltages(): + voltages = i03.vfm_mirror_voltages(fake_with_ophyd_sim=True) + voltages.voltage_lookup_table_path = "tests/test_data/test_mirror_focus.json" + yield voltages + beamline_utils.clear_devices() + + +@pytest.fixture +def undulator_dcm(RE, dcm): + undulator_dcm = i03.undulator_dcm(fake_with_ophyd_sim=True) + undulator_dcm.dcm = dcm + undulator_dcm.dcm_roll_converter_lookup_table_path = ( + "tests/test_data/test_beamline_dcm_roll_converter.txt" + ) + undulator_dcm.dcm_pitch_converter_lookup_table_path = ( + "tests/test_data/test_beamline_dcm_pitch_converter.txt" + ) + yield undulator_dcm + beamline_utils.clear_devices() + + +@pytest.fixture +def webcam(RE) -> Generator[Webcam, Any, Any]: + webcam = i03.webcam(fake_with_ophyd_sim=True) + with patch.object(webcam, "_write_image"): + yield webcam + + +@pytest.fixture +def thawer(RE) -> Generator[Thawer, Any, Any]: + yield i03.thawer(fake_with_ophyd_sim=True) + + +@pytest.fixture +def aperture_scatterguard(RE): + positions = { + AperturePosition.LARGE: SingleAperturePosition( + location=ApertureFiveDimensionalLocation(0, 1, 2, 3, 4), + name="Large", + GDA_name="LARGE_APERTURE", + radius_microns=100, + ), + AperturePosition.MEDIUM: SingleAperturePosition( + location=ApertureFiveDimensionalLocation(5, 6, 2, 8, 9), + name="Medium", + GDA_name="MEDIUM_APERTURE", + radius_microns=50, + ), + AperturePosition.SMALL: SingleAperturePosition( + location=ApertureFiveDimensionalLocation(10, 11, 2, 13, 14), + name="Small", + GDA_name="SMALL_APERTURE", + radius_microns=20, + ), + AperturePosition.ROBOT_LOAD: SingleAperturePosition( + location=ApertureFiveDimensionalLocation(15, 16, 2, 18, 19), + name="Robot_load", + GDA_name="ROBOT_LOAD", + radius_microns=None, + ), + } + with ( + patch( + "dodal.beamlines.i03.load_positions_from_beamline_parameters", + return_value=positions, + ), + patch( + "dodal.beamlines.i03.load_tolerances_from_beamline_params", + 
return_value=ApertureScatterguardTolerances(0.1, 0.1, 0.1, 0.1, 0.1), + ), + ): + ap_sg = i03.aperture_scatterguard(fake_with_ophyd_sim=True) + with ( + patch_async_motor(ap_sg._aperture.x), + patch_async_motor(ap_sg._aperture.y), + patch_async_motor(ap_sg._aperture.z, 2), + patch_async_motor(ap_sg._scatterguard.x), + patch_async_motor(ap_sg._scatterguard.y), + ): + RE(bps.abs_set(ap_sg, AperturePosition.SMALL)) + + set_mock_value(ap_sg._aperture.small, 1) + yield ap_sg + + +@pytest.fixture() +def test_config_files(): + return { + "zoom_params_file": "tests/test_data/test_jCameraManZoomLevels.xml", + "oav_config_json": "tests/test_data/test_OAVCentring.json", + "display_config": "tests/test_data/test_display.configuration", + } + + +@pytest.fixture +def test_full_grid_scan_params(): + params = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_grid_with_edge_detect_parameters.json" + ) + return GridScanWithEdgeDetect(**params) + + +@pytest.fixture() +def fake_create_devices( + eiger: EigerDetector, + smargon: Smargon, + zebra: Zebra, + detector_motion: DetectorMotion, + aperture_scatterguard: ApertureScatterguard, +): + mock_omega_sets = MagicMock(return_value=NullStatus()) + + smargon.omega.velocity.set = mock_omega_sets + smargon.omega.set = mock_omega_sets + + devices = { + "eiger": eiger, + "smargon": smargon, + "zebra": zebra, + "detector_motion": detector_motion, + "backlight": i03.backlight(fake_with_ophyd_sim=True), + "ap_sg": aperture_scatterguard, + } + return devices + + +@pytest.fixture() +def fake_create_rotation_devices( + eiger: EigerDetector, + smargon: Smargon, + zebra: Zebra, + detector_motion: DetectorMotion, + backlight: Backlight, + attenuator: Attenuator, + flux: Flux, + undulator: Undulator, + aperture_scatterguard: ApertureScatterguard, + synchrotron: Synchrotron, + s4_slit_gaps: S4SlitGaps, + dcm: DCM, + robot: BartRobot, + oav: OAV, +): + set_mock_value(smargon.omega.max_velocity, 131) + oav.zoom_controller.onst.sim_put("1.0x") # type: ignore + oav.zoom_controller.fvst.sim_put("5.0x") # type: ignore + + return RotationScanComposite( + attenuator=attenuator, + backlight=backlight, + dcm=dcm, + detector_motion=detector_motion, + eiger=eiger, + flux=flux, + smargon=smargon, + undulator=undulator, + aperture_scatterguard=aperture_scatterguard, + synchrotron=synchrotron, + s4_slit_gaps=s4_slit_gaps, + zebra=zebra, + robot=robot, + oav=oav, + ) + + +@pytest.fixture +def zocalo(done_status): + zoc = i03.zocalo(fake_with_ophyd_sim=True) + zoc.stage = MagicMock(return_value=done_status) + zoc.unstage = MagicMock(return_value=done_status) + return zoc + + +@pytest.fixture +async def panda(RE: RunEngine): + class MockBlock(Device): + def __init__( + self, + prefix: str, + name: str = "", + attributes: dict[str, Any] = {}, # noqa + ): + for name, dtype in attributes.items(): + setattr(self, name, epics_signal_rw(dtype, "", "")) + + def mock_vector_block(n, attributes): + return DeviceVector( + {i: MockBlock(f"{i}", f"{i}", attributes) for i in range(n)} + ) + + async def set_mock_blocks( + panda, mock_blocks: dict[str, tuple[int, dict[str, Any]]] + ): + for name, block in mock_blocks.items(): + n, attrs = block + block = mock_vector_block(n, attrs) + await block.connect(mock=True) + setattr(panda, name, block) + + async def create_mock_signals(devices_and_signals: dict[Device, dict[str, Any]]): + for device, signals in devices_and_signals.items(): + for name, dtype in signals.items(): + sig = epics_signal_rw(dtype, name, name) + await 
sig.connect(mock=True) + setattr(device, name, sig) + + panda = i03.panda(fake_with_ophyd_sim=True) + await set_mock_blocks( + panda, + { + "inenc": (8, {"setp": float}), + "clock": (8, {"period": float}), + "counter": (8, {"enable": str}), + }, + ) + await create_mock_signals( + { + panda.pcap: {"enable": str}, + **{panda.pulse[i]: {"enable": str} for i in panda.pulse.keys()}, + } + ) + + set_mock_value( + panda.data.datasets, DatasetTable(name=np.array(["name"]), hdf5_type=[]) + ) + + return panda + + +@pytest.fixture +def oav_parameters_for_rotation(test_config_files) -> OAVParameters: + return OAVParameters(oav_config_json=test_config_files["oav_config_json"]) + + +async def async_status_done(): + await asyncio.sleep(0) + + +def mock_gridscan_kickoff_complete(gridscan: FastGridScanCommon): + gridscan.kickoff = MagicMock(return_value=async_status_done) + gridscan.complete = MagicMock(return_value=async_status_done) + + +@pytest.fixture +async def fake_fgs_composite( + smargon: Smargon, + test_fgs_params: ThreeDGridScan, + RE: RunEngine, + done_status, + attenuator, + xbpm_feedback, + synchrotron, + aperture_scatterguard, + zocalo, + dcm, + panda, +): + fake_composite = FlyScanXRayCentreComposite( + aperture_scatterguard=aperture_scatterguard, + attenuator=attenuator, + backlight=i03.backlight(fake_with_ophyd_sim=True), + dcm=dcm, + # We don't use the eiger fixture here because .unstage() is used in some tests + eiger=i03.eiger(fake_with_ophyd_sim=True), + zebra_fast_grid_scan=i03.zebra_fast_grid_scan(fake_with_ophyd_sim=True), + flux=i03.flux(fake_with_ophyd_sim=True), + s4_slit_gaps=i03.s4_slit_gaps(fake_with_ophyd_sim=True), + smargon=smargon, + undulator=i03.undulator(fake_with_ophyd_sim=True), + synchrotron=synchrotron, + xbpm_feedback=xbpm_feedback, + zebra=i03.zebra(fake_with_ophyd_sim=True), + zocalo=zocalo, + panda=panda, + panda_fast_grid_scan=i03.panda_fast_grid_scan(fake_with_ophyd_sim=True), + robot=i03.robot(fake_with_ophyd_sim=True), + ) + + fake_composite.eiger.stage = MagicMock(return_value=done_status) + # unstage should be mocked on a per-test basis because several rely on unstage + fake_composite.eiger.set_detector_parameters(test_fgs_params.detector_params) + fake_composite.eiger.ALL_FRAMES_TIMEOUT = 2 # type: ignore + fake_composite.eiger.stop_odin_when_all_frames_collected = MagicMock() + fake_composite.eiger.odin.check_odin_state = lambda: True + + test_result = { + "centre_of_mass": [6, 6, 6], + "max_voxel": [5, 5, 5], + "max_count": 123456, + "n_voxels": 321, + "total_count": 999999, + "bounding_box": [[3, 3, 3], [9, 9, 9]], + } + + @AsyncStatus.wrap + async def mock_complete(result): + await fake_composite.zocalo._put_results([result], {"dcid": 0, "dcgid": 0}) + + fake_composite.zocalo.trigger = MagicMock( + side_effect=partial(mock_complete, test_result) + ) # type: ignore + fake_composite.zocalo.timeout_s = 3 + set_mock_value(fake_composite.zebra_fast_grid_scan.scan_invalid, False) + set_mock_value(fake_composite.zebra_fast_grid_scan.position_counter, 0) + set_mock_value(fake_composite.smargon.x.max_velocity, 10) + + set_mock_value(fake_composite.robot.barcode, "BARCODE") + + return fake_composite + + +def fake_read(obj, initial_positions, _): + initial_positions[obj] = 0 + yield Msg("null", obj) + + +def extract_metafile(input_filename, output_filename): + with gzip.open(input_filename) as metafile_fo: + with open(output_filename, "wb") as output_fo: + output_fo.write(metafile_fo.read()) + + +@pytest.fixture +def sim_run_engine(): + return 
RunEngineSimulator()
+
+
+class DocumentCapturer:
+    """A utility which can be subscribed to the RunEngine in place of a callback in order
+    to intercept documents and make assertions about their contents"""
+
+    def __init__(self) -> None:
+        self.docs_received: list[tuple[str, dict[str, Any]]] = []
+
+    def __call__(self, *args: Any, **kwargs: Any) -> Any:
+        self.docs_received.append((args[0], args[1]))
+
+    @staticmethod
+    def is_match(
+        doc: tuple[str, dict[str, Any]],
+        name: str,
+        has_fields: Sequence[str] = [],
+        matches_fields: dict[str, Any] = {},  # noqa
+    ):
+        """Returns True if the given document:
+        - has the same name
+        - contains all the fields in has_fields
+        - contains all the fields in matches_fields with the same content"""
+
+        return (
+            doc[0] == name
+            and all(f in doc[1].keys() for f in has_fields)
+            and matches_fields.items() <= doc[1].items()
+        )
+
+    @staticmethod
+    def get_matches(
+        docs: list[tuple[str, dict[str, Any]]],
+        name: str,
+        has_fields: Sequence[str] = [],
+        matches_fields: dict[str, Any] = {},  # noqa
+    ):
+        """Get all the docs from docs which:
+        - have the same name
+        - contain all the fields in has_fields
+        - contain all the fields in matches_fields with the same content"""
+        return list(
+            filter(
+                partial(
+                    DocumentCapturer.is_match,
+                    name=name,
+                    has_fields=has_fields,
+                    matches_fields=matches_fields,
+                ),
+                docs,
+            )
+        )
+
+    @staticmethod
+    def assert_doc(
+        docs: list[tuple[str, dict[str, Any]]],
+        name: str,
+        has_fields: Sequence[str] = [],
+        matches_fields: dict[str, Any] = {},  # noqa
+        does_exist: bool = True,
+    ):
+        """Assert that a matching doc has been received by the sim,
+        and return the first match if it is meant to exist"""
+        matches = DocumentCapturer.get_matches(docs, name, has_fields, matches_fields)
+        if does_exist:
+            assert matches
+            return matches[0]
+        else:
+            assert matches == []
+
+    @staticmethod
+    def get_docs_until(
+        docs: list[tuple[str, dict[str, Any]]],
+        name: str,
+        has_fields: Sequence[str] = [],
+        matches_fields: dict[str, Any] = {},  # noqa
+    ):
+        """return all the docs from the list of docs until the first matching one"""
+        for i, doc in enumerate(docs):
+            if DocumentCapturer.is_match(doc, name, has_fields, matches_fields):
+                return docs[: i + 1]
+        raise ValueError(f"Doc {name=}, {has_fields=}, {matches_fields=} not found")
+
+    @staticmethod
+    def get_docs_from(
+        docs: list[tuple[str, dict[str, Any]]],
+        name: str,
+        has_fields: Sequence[str] = [],
+        matches_fields: dict[str, Any] = {},  # noqa
+    ):
+        """return all the docs from the list of docs after the first matching one"""
+        for i, doc in enumerate(docs):
+            if DocumentCapturer.is_match(doc, name, has_fields, matches_fields):
+                return docs[i:]
+        raise ValueError(f"Doc {name=}, {has_fields=}, {matches_fields=} not found")
+
+    @staticmethod
+    def assert_events_and_data_in_order(
+        docs: list[tuple[str, dict[str, Any]]],
+        match_data_keys_list: Sequence[Sequence[str]],
+    ):
+        for event_data_keys in match_data_keys_list:
+            docs = DocumentCapturer.get_docs_from(docs, "event")
+            doc = docs.pop(0)[1]["data"]
+            assert all(
+                k in doc.keys() for k in event_data_keys
+            ), f"One of {event_data_keys=} not in {doc}"
+
+
+@pytest.fixture
+def feature_flags():
+    return FeatureFlags(
+        **{field_name: False for field_name in FeatureFlags.__fields__.keys()}
+    )
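A minimal sketch of how the DocumentCapturer above is intended to be used, assuming only the `RE` fixture defined earlier in this conftest; `demo_plan` and its metadata are hypothetical stand-ins for any plan that opens a run:

import bluesky.plan_stubs as bps
import bluesky.preprocessors as bpp


@bpp.run_decorator(md={"purpose": "demo"})
def demo_plan():
    # Any body will do; the run wrapper is what emits the start/stop documents
    yield from bps.sleep(0.1)


def test_demo_plan_emits_start_and_stop(RE):
    capturer = DocumentCapturer()
    RE.subscribe(capturer)
    RE(demo_plan())
    # The start document carries the metadata passed to run_decorator above
    DocumentCapturer.assert_doc(
        capturer.docs_received, "start", matches_fields={"purpose": "demo"}
    )
    DocumentCapturer.assert_doc(capturer.docs_received, "stop")

When ordering matters, get_docs_until/get_docs_from can be used to slice docs_received around a matching document before making further assertions.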
diff --git a/tests/system_tests/__init__.py b/tests/system_tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/system_tests/hyperion/__init__.py b/tests/system_tests/hyperion/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/system_tests/hyperion/experiment_plans/__init__.py b/tests/system_tests/hyperion/experiment_plans/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/tests/system_tests/hyperion/experiment_plans/test_fgs_plan.py b/tests/system_tests/hyperion/experiment_plans/test_fgs_plan.py
new file mode 100644
index 000000000..ea25ca3f5
--- /dev/null
+++ b/tests/system_tests/hyperion/experiment_plans/test_fgs_plan.py
@@ -0,0 +1,350 @@
+import uuid
+from collections.abc import Callable
+from unittest.mock import MagicMock, patch
+
+import bluesky.plan_stubs as bps
+import bluesky.preprocessors as bpp
+import pytest
+import pytest_asyncio
+from bluesky.run_engine import RunEngine
+from dodal.beamlines import i03
+from dodal.devices.aperturescatterguard import AperturePosition
+from dodal.devices.smargon import Smargon
+from ophyd.sim import NullStatus
+from ophyd_async.core import set_mock_value
+
+from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import (
+    read_hardware_during_collection,
+    read_hardware_pre_collection,
+)
+from mx_bluesky.hyperion.device_setup_plans.xbpm_feedback import (
+    transmission_and_xbpm_feedback_for_collection_decorator,
+)
+from mx_bluesky.hyperion.exceptions import WarningException
+from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import (
+    FlyScanXRayCentreComposite,
+    flyscan_xray_centre,
+)
+from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import (
+    create_gridscan_callbacks,
+)
+from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import (
+    GridscanISPyBCallback,
+)
+from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback import (
+    GridscanNexusFileCallback,
+)
+from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import IspybIds
+from mx_bluesky.hyperion.parameters.constants import CONST
+from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan
+
+from ....conftest import default_raw_params
+from ..external_interaction.conftest import (  # noqa
+    fetch_comment,
+    zocalo_env,
+)
+
+
+@pytest.fixture
+def params():
+    params = ThreeDGridScan(**default_raw_params())
+    params.beamline = CONST.SIM.BEAMLINE
+    params.zocalo_environment = "dev_artemis"
+    yield params
+
+
+@pytest.fixture()
+def callbacks(params):
+    with patch(
+        "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback.NexusWriter"
+    ):
+        nexus_cb, ispyb_cb = create_gridscan_callbacks()
+        ispyb_cb.ispyb_config = CONST.SIM.DEV_ISPYB_DATABASE_CFG
+    yield nexus_cb, ispyb_cb
+
+
+def reset_positions(smargon: Smargon):
+    yield from bps.mv(smargon.x, -1, smargon.y, -1, smargon.z, -1)
+
+
+@pytest_asyncio.fixture
+async def fxc_composite():
+    with (
+        patch("dodal.devices.zocalo.zocalo_results._get_zocalo_connection"),
+        patch("dodal.devices.zocalo.zocalo_results.workflows.recipe"),
+        patch("dodal.devices.zocalo.zocalo_results.workflows.recipe"),
+    ):
+        zocalo = i03.zocalo()
+
+    composite = FlyScanXRayCentreComposite(
+        attenuator=i03.attenuator(),
+        aperture_scatterguard=i03.aperture_scatterguard(),
+        backlight=i03.backlight(),
+        dcm=i03.dcm(fake_with_ophyd_sim=True),
+        eiger=i03.eiger(),
+        zebra_fast_grid_scan=i03.zebra_fast_grid_scan(),
+        flux=i03.flux(fake_with_ophyd_sim=True),
+        robot=i03.robot(fake_with_ophyd_sim=True),
+        panda=i03.panda(fake_with_ophyd_sim=True),
+        panda_fast_grid_scan=i03.panda_fast_grid_scan(fake_with_ophyd_sim=True),
+        s4_slit_gaps=i03.s4_slit_gaps(),
+        
smargon=i03.smargon(), + undulator=i03.undulator(), + synchrotron=i03.synchrotron(fake_with_ophyd_sim=True), + xbpm_feedback=i03.xbpm_feedback(fake_with_ophyd_sim=True), + zebra=i03.zebra(), + zocalo=zocalo, + ) + + await composite.robot.barcode._backend.put("ABCDEFGHIJ") # type: ignore + composite.dcm.energy_in_kev.user_readback.sim_put(12.345) # type: ignore + + large = composite.aperture_scatterguard._loaded_positions[AperturePosition.LARGE] + await composite.aperture_scatterguard._set_raw_unsafe(large.location) + composite.eiger.cam.manual_trigger.put("Yes") + composite.eiger.odin.check_odin_initialised = lambda: (True, "") + composite.eiger.stage = MagicMock(return_value=NullStatus()) + composite.eiger.unstage = MagicMock(return_value=NullStatus()) + + set_mock_value(composite.xbpm_feedback.pos_ok, True) + set_mock_value(composite.xbpm_feedback.pos_stable, True) + + return composite + + +@pytest.mark.s03 +def test_s03_devices_connect(fxc_composite: FlyScanXRayCentreComposite): + assert fxc_composite.aperture_scatterguard + assert fxc_composite.backlight + + +@pytest.mark.s03 +def test_read_hardware_pre_collection( + RE: RunEngine, + fxc_composite: FlyScanXRayCentreComposite, +): + @bpp.run_decorator() + def read_run(u, s, g, r, a, f, dcm, ap_sg, sm): + yield from read_hardware_pre_collection( + undulator=u, synchrotron=s, s4_slit_gaps=g, robot=r, smargon=sm + ) + yield from read_hardware_during_collection( + ap_sg, a, f, dcm, fxc_composite.eiger + ) + + RE( + read_run( + fxc_composite.undulator, + fxc_composite.synchrotron, + fxc_composite.s4_slit_gaps, + fxc_composite.robot, + fxc_composite.attenuator, + fxc_composite.flux, + fxc_composite.dcm, + fxc_composite.aperture_scatterguard, + fxc_composite.smargon, + ) + ) + + +@pytest.mark.s03 +async def test_xbpm_feedback_decorator( + RE: RunEngine, + fxc_composite: FlyScanXRayCentreComposite, + params: ThreeDGridScan, + callbacks: tuple[GridscanNexusFileCallback, GridscanISPyBCallback], +): + # This test is currently kind of more a unit test since we are faking XBPM feedback + # with ophyd.sim, but it should continue to pass when we replace it with something + # in S03 + + @transmission_and_xbpm_feedback_for_collection_decorator( + fxc_composite.xbpm_feedback, + fxc_composite.attenuator, + params.transmission_frac, + ) + def decorated_plan(): + yield from bps.sleep(0.1) + + RE(decorated_plan()) + assert await fxc_composite.xbpm_feedback.pos_stable.get_value() == 1 + + +@pytest.mark.s03 +@patch("bluesky.plan_stubs.wait", autospec=True) +@patch("bluesky.plan_stubs.kickoff", autospec=True) +@patch("bluesky.plan_stubs.complete", autospec=True) +@patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan_and_move", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.set_zebra_shutter_to_manual", + autospec=True, +) +def test_full_plan_tidies_at_end( + set_shutter_to_manual: MagicMock, + run_gridscan_and_move: MagicMock, + complete: MagicMock, + kickoff: MagicMock, + wait: MagicMock, + fxc_composite: FlyScanXRayCentreComposite, + params: ThreeDGridScan, + RE: RunEngine, + callbacks: tuple[GridscanNexusFileCallback, GridscanISPyBCallback], +): + RE(reset_positions(fxc_composite.smargon)) + nexus_cb, ispyb_cb = callbacks + nexus_cb.nexus_writer_1 = MagicMock() + nexus_cb.nexus_writer_2 = MagicMock() + ispyb_cb.ispyb_ids = IspybIds( + data_collection_ids=(0, 0), data_collection_group_id=0, grid_ids=(0,) + ) + [RE.subscribe(cb) for cb in callbacks] + 
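+    # run_gridscan_and_move and the zebra shutter tidy-up are patched out above, so
+    # running the full plan here exercises only the setup and tidy-up wrapper logic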
RE(flyscan_xray_centre(fxc_composite, params)) + set_shutter_to_manual.assert_called_once() + + +@pytest.mark.s03 +@patch("bluesky.plan_stubs.wait", autospec=True) +@patch("bluesky.plan_stubs.kickoff", autospec=True) +@patch("bluesky.plan_stubs.complete", autospec=True) +@patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan_and_move", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.set_zebra_shutter_to_manual", + autospec=True, +) +def test_full_plan_tidies_at_end_when_plan_fails( + set_shutter_to_manual: MagicMock, + run_gridscan_and_move: MagicMock, + complete: MagicMock, + kickoff: MagicMock, + wait: MagicMock, + fxc_composite: FlyScanXRayCentreComposite, + params: ThreeDGridScan, + RE: RunEngine, +): + class _Exception(Exception): ... + + run_gridscan_and_move.side_effect = _Exception() + with pytest.raises(_Exception): + RE(flyscan_xray_centre(fxc_composite, params)) + set_shutter_to_manual.assert_called_once() + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger" +) +@pytest.mark.s03 +def test_GIVEN_scan_invalid_WHEN_plan_run_THEN_ispyb_entry_made_but_no_zocalo_entry( + zocalo_trigger: MagicMock, + RE: RunEngine, + fxc_composite: FlyScanXRayCentreComposite, + fetch_comment: Callable, # noqa + params: ThreeDGridScan, + callbacks: tuple[GridscanNexusFileCallback, GridscanISPyBCallback], +): + _, ispyb_cb = callbacks + params.storage_directory = "./tmp" + params.file_name = str(uuid.uuid1()) + + # Currently s03 calls anything with z_steps > 1 invalid + params.z_steps = 100 + RE(reset_positions(fxc_composite.smargon)) + + [RE.subscribe(cb) for cb in callbacks] + with pytest.raises(WarningException): + RE(flyscan_xray_centre(fxc_composite, params)) + + ids = ispyb_cb.ispyb_ids + assert ids.data_collection_group_id is not None + dcid_used = ispyb_cb.ispyb_ids.data_collection_ids[0] + + comment = fetch_comment(dcid_used) + + assert "too long/short/bent" in comment + zocalo_trigger.run_start.assert_not_called() + + +@pytest.mark.s03 +async def test_complete_xray_centre_plan_with_no_callbacks_falls_back_to_centre( + RE: RunEngine, + fxc_composite: FlyScanXRayCentreComposite, + zocalo_env: None, # noqa + params: ThreeDGridScan, + callbacks, + done_status, +): + fxc_composite.zebra_fast_grid_scan.kickoff = MagicMock(return_value=done_status) + fxc_composite.zebra_fast_grid_scan.complete = MagicMock(return_value=done_status) + + params.storage_directory = "./tmp" + params.file_name = str(uuid.uuid1()) + + # Currently s03 calls anything with z_steps > 1 invalid + params.z_steps = 1 + + RE(reset_positions(fxc_composite.smargon)) + + def zocalo_trigger(): + fxc_composite.zocalo._raw_results_received.put({"results": []}) + return done_status + + # [RE.subscribe(cb) for cb in callbacks] + fxc_composite.zocalo.trigger = MagicMock(side_effect=zocalo_trigger) + RE(flyscan_xray_centre(fxc_composite, params)) + + # The following numbers are derived from the centre returned in fake_zocalo + assert ( + await fxc_composite.sample_motors.x.user_readback.get_value() + == pytest.approx(-1) + ) + assert ( + await fxc_composite.sample_motors.y.user_readback.get_value() + == pytest.approx(-1) + ) + assert ( + await fxc_composite.sample_motors.z.user_readback.get_value() + == pytest.approx(-1) + ) + + +@pytest.mark.s03 +async def test_complete_xray_centre_plan_with_callbacks_moves_to_centre( + RE: RunEngine, + fxc_composite: FlyScanXRayCentreComposite, + zocalo_env: None, # noqa + params: 
ThreeDGridScan, + callbacks, + done_status, +): + fxc_composite.zebra_fast_grid_scan.kickoff = MagicMock(return_value=done_status) + fxc_composite.zebra_fast_grid_scan.complete = MagicMock(return_value=done_status) + + params.storage_directory = "./tmp" + params.file_name = str(uuid.uuid1()) + + # Currently s03 calls anything with z_steps > 1 invalid + params.z_steps = 1 + + RE(reset_positions(fxc_composite.smargon)) + + [RE.subscribe(cb) for cb in callbacks] + RE(flyscan_xray_centre(fxc_composite, params)) + + # The following numbers are derived from the centre returned in fake_zocalo + assert ( + await fxc_composite.sample_motors.x.user_readback.get_value() + == pytest.approx(0.05) + ) + assert ( + await fxc_composite.sample_motors.y.user_readback.get_value() + == pytest.approx(0.15) + ) + assert ( + await fxc_composite.sample_motors.z.user_readback.get_value() + == pytest.approx(0.25) + ) diff --git a/tests/system_tests/hyperion/experiment_plans/test_plan_system.py b/tests/system_tests/hyperion/experiment_plans/test_plan_system.py new file mode 100644 index 000000000..c17b8f44c --- /dev/null +++ b/tests/system_tests/hyperion/experiment_plans/test_plan_system.py @@ -0,0 +1,70 @@ +import bluesky.preprocessors as bpp +import pytest +from bluesky.run_engine import RunEngine +from dodal.beamlines import i03 +from dodal.common.beamlines.beamline_parameters import ( + BEAMLINE_PARAMETER_PATHS, + GDABeamlineParameters, +) +from dodal.devices.aperturescatterguard import ( + ApertureScatterguard, + load_positions_from_beamline_parameters, + load_tolerances_from_beamline_params, +) +from dodal.devices.s4_slit_gaps import S4SlitGaps +from dodal.devices.undulator import Undulator + +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, + read_hardware_pre_collection, +) +from mx_bluesky.hyperion.parameters.constants import CONST + + +@pytest.mark.s03 +async def test_getting_data_for_ispyb(): + params = GDABeamlineParameters.from_file(BEAMLINE_PARAMETER_PATHS["i03"]) + undulator = Undulator( + f"{CONST.SIM.INSERTION_PREFIX}-MO-SERVC-01:", name="undulator" + ) + synchrotron = i03.synchrotron(fake_with_ophyd_sim=True) + slit_gaps = S4SlitGaps(f"{CONST.SIM.BEAMLINE}-AL-SLITS-04:", name="slits") + attenuator = i03.attenuator(fake_with_ophyd_sim=True) + flux = i03.flux(fake_with_ophyd_sim=True) + dcm = i03.dcm(fake_with_ophyd_sim=True) + aperture_scatterguard = ApertureScatterguard( + prefix="BL03S", + name="ap_sg", + loaded_positions=load_positions_from_beamline_parameters(params), + tolerances=load_tolerances_from_beamline_params(params), + ) + smargon = i03.smargon(fake_with_ophyd_sim=True) + eiger = i03.eiger(fake_with_ophyd_sim=True) + await undulator.connect() + await synchrotron.connect() + slit_gaps.wait_for_connection() + await attenuator.connect() + flux.wait_for_connection() + await aperture_scatterguard.connect() + await smargon.connect() + robot = i03.robot(fake_with_ophyd_sim=True) + + RE = RunEngine() + + @bpp.run_decorator() + def standalone_read_hardware(und, syn, slits, robot, att, flux, ap_sg, sm): + yield from read_hardware_pre_collection(und, syn, slits, robot, smargon=sm) + yield from read_hardware_during_collection(ap_sg, att, flux, dcm, eiger) + + RE( + standalone_read_hardware( + undulator, + synchrotron, + slit_gaps, + robot, + attenuator, + flux, + aperture_scatterguard, + smargon, + ) + ) diff --git a/tests/system_tests/hyperion/external_interaction/__init__.py 
b/tests/system_tests/hyperion/external_interaction/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/system_tests/hyperion/external_interaction/callbacks/__init__.py b/tests/system_tests/hyperion/external_interaction/callbacks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/system_tests/hyperion/external_interaction/callbacks/test_external_callbacks.py b/tests/system_tests/hyperion/external_interaction/callbacks/test_external_callbacks.py new file mode 100644 index 000000000..5a7443784 --- /dev/null +++ b/tests/system_tests/hyperion/external_interaction/callbacks/test_external_callbacks.py @@ -0,0 +1,266 @@ +from __future__ import annotations + +import os +import re +import signal +import subprocess +import threading +from genericpath import isfile +from time import sleep +from unittest.mock import MagicMock, patch + +import bluesky.plan_stubs as bps +import numpy as np +import pytest +import pytest_asyncio +import zmq +from bluesky.callbacks import CallbackBase +from bluesky.callbacks.zmq import Publisher +from bluesky.run_engine import RunEngine +from dodal.devices.zocalo import ZocaloResults +from ophyd_async.core import set_mock_value +from zmq.utils.monitor import recv_monitor_message + +from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import ( + FlyScanXRayCentreComposite, + flyscan_xray_centre, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationScanComposite, + rotation_scan, +) +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan +from mx_bluesky.hyperion.parameters.rotation import RotationScan +from mx_bluesky.hyperion.utils.utils import convert_angstrom_to_eV + +from .....conftest import fake_read +from ..conftest import ( # noqa + fetch_comment, + zocalo_env, +) + +""" +Note that because these tests use the external processes some of the errors coming from +them may not be very informative. You will want to check the log files produced in `tmp` +for better logs. 
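+For example, after a dev-mode run the ISPyB callback output can be inspected with
+`tail tmp/dev/hyperion_ispyb_callback.log` (the rotation test below reads this file).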
+"""
+
+
+@pytest_asyncio.fixture
+async def zocalo_device():
+    zd = ZocaloResults()
+    zd.timeout_s = 5
+    await zd.connect()
+    return zd
+
+
+class DocumentCatcher(CallbackBase):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.start = MagicMock()
+        self.descriptor = MagicMock()
+        self.event = MagicMock()
+        self.stop = MagicMock()
+
+
+def event_monitor(monitor: zmq.Socket, connection_active_lock: threading.Lock) -> None:
+    try:
+        while monitor.poll():
+            monitor_event = recv_monitor_message(monitor)
+            LOGGER.info(f"Event: {monitor_event}")
+            if monitor_event["event"] == zmq.EVENT_CONNECTED:
+                LOGGER.info("CONNECTED - acquiring connection_active_lock")
+                connection_active_lock.acquire()
+            if monitor_event["event"] == zmq.EVENT_MONITOR_STOPPED:
+                break
+    except zmq.ZMQError:
+        pass
+    finally:
+        connection_active_lock.release()
+        monitor.close()
+        LOGGER.info("event monitor thread done!")
+
+
+@pytest.fixture
+def RE_with_external_callbacks():
+    RE = RunEngine()
+    old_ispyb_config = os.environ.get("ISPYB_CONFIG_PATH")
+
+    process_env = os.environ.copy()
+    process_env["ISPYB_CONFIG_PATH"] = CONST.SIM.DEV_ISPYB_DATABASE_CFG
+
+    external_callbacks_process = subprocess.Popen(
+        [
+            "python",
+            "src/hyperion/external_interaction/callbacks/__main__.py",
+            "--dev",
+        ],
+        env=process_env,
+    )
+    publisher = Publisher(f"localhost:{CONST.CALLBACK_0MQ_PROXY_PORTS[0]}")
+    monitor = publisher._socket.get_monitor_socket()
+
+    connection_active_lock = threading.Lock()
+    t = threading.Thread(target=event_monitor, args=(monitor, connection_active_lock))
+    t.start()
+
+    while not connection_active_lock.locked():
+        sleep(0.1)  # wait for connection to happen before continuing
+
+    sub_id = RE.subscribe(publisher)
+
+    yield RE
+
+    RE.unsubscribe(sub_id)
+    publisher.close()
+
+    external_callbacks_process.send_signal(signal.SIGINT)
+    sleep(0.01)
+    external_callbacks_process.kill()
+    t.join()
+    if old_ispyb_config:
+        os.environ["ISPYB_CONFIG_PATH"] = old_ispyb_config
+
+
+@pytest.mark.s03
+def test_RE_with_external_callbacks_starts_and_stops(
+    RE_with_external_callbacks: RunEngine,
+):
+    RE = RE_with_external_callbacks
+
+    def plan():
+        yield from bps.sleep(1)
+
+    RE(plan())
+
+
+@pytest.mark.s03
+async def test_external_callbacks_handle_gridscan_ispyb_and_zocalo(
+    RE_with_external_callbacks: RunEngine,
+    zocalo_env,  # noqa
+    test_fgs_params: ThreeDGridScan,
+    fake_fgs_composite: FlyScanXRayCentreComposite,
+    done_status,
+    zocalo_device: ZocaloResults,
+    fetch_comment,  # noqa
+):
+    """This test doesn't actually require S03 to be running, but it does require fake
+    zocalo, and a connection to the dev ISPyB database; like S03 tests, it can only run
+    locally at DLS."""
+
+    RE = RE_with_external_callbacks
+    test_fgs_params.zocalo_environment = "dev_artemis"
+
+    set_mock_value(
+        fake_fgs_composite.aperture_scatterguard._aperture.z.user_setpoint, 2
+    )
+    fake_fgs_composite.eiger.unstage = MagicMock(return_value=done_status)  # type: ignore
+    fake_fgs_composite.smargon.stub_offsets.set = MagicMock(return_value=done_status)  # type: ignore
+    fake_fgs_composite.zocalo = zocalo_device
+
+    doc_catcher = DocumentCatcher()
+    RE.subscribe(doc_catcher)
+
+    # Run the xray centring plan
+    RE(flyscan_xray_centre(fake_fgs_composite, test_fgs_params))
+
+    # Check that we emitted a valid reading from the zocalo device
+    zocalo_event = doc_catcher.event.call_args.args[0]["data"]  # type: ignore
+    assert np.all(zocalo_event["zocalo-centres_of_mass"][0] == [1, 2, 3])
+    assert 
np.all(zocalo_event["zocalo-bbox_sizes"][0] == [6, 6, 5]) + + # get dcids from zocalo device + dcid_reading = await zocalo_device.ispyb_dcid.read() + dcgid_reading = await zocalo_device.ispyb_dcgid.read() + + dcid = dcid_reading["zocalo-ispyb_dcid"]["value"] + dcgid = dcgid_reading["zocalo-ispyb_dcgid"]["value"] + + assert dcid != 0 + assert dcgid != 0 + + # check the data in dev ispyb corresponding to this "collection" + ispyb_comment = fetch_comment(dcid) + assert ispyb_comment != "" + assert "Diffraction grid scan of 40 by 20 images" in ispyb_comment + assert "Zocalo processing took" in ispyb_comment + assert "Position (grid boxes) ['1', '2', '3']" in ispyb_comment + assert "Size (grid boxes) [6 6 5];" in ispyb_comment + + +@pytest.mark.s03() +def test_remote_callbacks_write_to_dev_ispyb_for_rotation( + RE_with_external_callbacks: RunEngine, + test_rotation_params: RotationScan, + fetch_comment, # noqa + fetch_datacollection_attribute, + undulator, + attenuator, + synchrotron, + s4_slit_gaps, + flux, + robot, + aperture_scatterguard, + fake_create_devices, +): + test_wl = 0.71 + test_bs_x = 0.023 + test_bs_y = 0.047 + test_exp_time = 0.023 + test_img_wid = 0.27 + + test_rotation_params.rotation_increment_deg = test_img_wid + test_rotation_params.exposure_time_s = test_exp_time + test_rotation_params.demand_energy_ev = convert_angstrom_to_eV(test_wl) + + composite = RotationScanComposite( + aperture_scatterguard=aperture_scatterguard, + attenuator=attenuator, + backlight=fake_create_devices["backlight"], + dcm=fake_create_devices["dcm"], + detector_motion=fake_create_devices["detector_motion"], + eiger=fake_create_devices["eiger"], + flux=flux, + smargon=fake_create_devices["smargon"], + undulator=undulator, + synchrotron=synchrotron, + s4_slit_gaps=s4_slit_gaps, + zebra=fake_create_devices["zebra"], + robot=robot, + oav=fake_create_devices["oav"], + ) + + with patch("bluesky.preprocessors.__read_and_stash_a_motor", fake_read): + RE_with_external_callbacks( + rotation_scan( + composite, + test_rotation_params, + ) + ) + + sleep(1) + assert isfile("tmp/dev/hyperion_ispyb_callback.log") + ispyb_log_tail = subprocess.run( + ["tail", "tmp/dev/hyperion_ispyb_callback.log"], + timeout=1, + stdout=subprocess.PIPE, + ).stdout.decode("utf-8") + + ids_re = re.compile(r"data_collection_ids=(\d+) data_collection_group_id=(\d+) ") + matches = ids_re.findall(ispyb_log_tail) + + dcid = matches[0][0] + + comment = fetch_comment(dcid) + assert comment == "Hyperion rotation scan" + wavelength = fetch_datacollection_attribute(dcid, "wavelength") + beamsize_x = fetch_datacollection_attribute(dcid, "beamSizeAtSampleX") + beamsize_y = fetch_datacollection_attribute(dcid, "beamSizeAtSampleY") + exposure = fetch_datacollection_attribute(dcid, "exposureTime") + + assert wavelength == test_wl + assert beamsize_x == test_bs_x + assert beamsize_y == test_bs_y + assert exposure == test_exp_time diff --git a/tests/system_tests/hyperion/external_interaction/conftest.py b/tests/system_tests/hyperion/external_interaction/conftest.py new file mode 100644 index 000000000..d1a99d397 --- /dev/null +++ b/tests/system_tests/hyperion/external_interaction/conftest.py @@ -0,0 +1,169 @@ +import os +from collections.abc import Callable +from functools import partial +from typing import Any + +import ispyb.sqlalchemy +import pytest +from ispyb.sqlalchemy import DataCollection, DataCollectionGroup, GridInfo, Position +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from 
mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import StoreInIspyb +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan + +from ....conftest import raw_params_from_file + +TEST_RESULT_LARGE = [ + { + "centre_of_mass": [1, 2, 3], + "max_voxel": [1, 2, 3], + "max_count": 105062, + "n_voxels": 35, + "total_count": 2387574, + "bounding_box": [[2, 2, 2], [8, 8, 7]], + } +] +TEST_RESULT_MEDIUM = [ + { + "centre_of_mass": [1, 2, 3], + "max_voxel": [2, 4, 5], + "max_count": 105062, + "n_voxels": 35, + "total_count": 2387574, + "bounding_box": [[1, 2, 3], [3, 4, 4]], + } +] +TEST_RESULT_SMALL = [ + { + "centre_of_mass": [1, 2, 3], + "max_voxel": [1, 2, 3], + "max_count": 105062, + "n_voxels": 35, + "total_count": 1387574, + "bounding_box": [[2, 2, 2], [3, 3, 3]], + } +] + + +def get_current_datacollection_comment(Session: Callable, dcid: int) -> str: + """Read the 'comments' field from the given datacollection id's ISPyB entry. + Returns an empty string if the comment is not yet initialised. + """ + try: + with Session() as session: + query = session.query(DataCollection).filter( + DataCollection.dataCollectionId == dcid + ) + current_comment: str = query.first().comments + except Exception: + current_comment = "" + return current_comment + + +def get_current_datacollection_attribute( + Session: Callable, dcid: int, attr: str +) -> str: + """Read the specified field 'attr' from the given datacollection id's ISPyB entry. + Returns an empty string if the attribute is not found. + """ + try: + with Session() as session: + query = session.query(DataCollection).filter( + DataCollection.dataCollectionId == dcid + ) + first_result = query.first() + data: str = getattr(first_result, attr) + except Exception: + data = "" + return data + + +def get_current_datacollection_grid_attribute( + Session: Callable, grid_id: int, attr: str +) -> Any: + with Session() as session: + query = session.query(GridInfo).filter(GridInfo.gridInfoId == grid_id) + first_result = query.first() + return getattr(first_result, attr) + + +def get_current_position_attribute( + Session: Callable, position_id: int, attr: str +) -> Any: + with Session() as session: + query = session.query(Position).filter(Position.positionId == position_id) + first_result = query.first() + if first_result is None: + return None + return getattr(first_result, attr) + + +def get_current_datacollectiongroup_attribute( + Session: Callable, dcg_id: int, attr: str +): + with Session() as session: + query = session.query(DataCollectionGroup).filter( + DataCollection.dataCollectionGroupId == dcg_id + ) + first_result = query.first() + return getattr(first_result, attr) + + +@pytest.fixture +def sqlalchemy_sessionmaker() -> sessionmaker: + url = ispyb.sqlalchemy.url(CONST.SIM.DEV_ISPYB_DATABASE_CFG) + engine = create_engine(url, connect_args={"use_pure": True}) + return sessionmaker(engine) + + +@pytest.fixture +def fetch_comment(sqlalchemy_sessionmaker) -> Callable: + return partial(get_current_datacollection_comment, sqlalchemy_sessionmaker) + + +@pytest.fixture +def fetch_datacollection_attribute(sqlalchemy_sessionmaker) -> Callable: + return partial(get_current_datacollection_attribute, sqlalchemy_sessionmaker) + + +@pytest.fixture +def fetch_datacollection_grid_attribute(sqlalchemy_sessionmaker) -> Callable: + return partial(get_current_datacollection_grid_attribute, sqlalchemy_sessionmaker) + + +@pytest.fixture +def 
fetch_datacollection_position_attribute(sqlalchemy_sessionmaker) -> Callable: + return partial(get_current_position_attribute, sqlalchemy_sessionmaker) + + +@pytest.fixture +def fetch_datacollectiongroup_attribute(sqlalchemy_sessionmaker) -> Callable: + return partial(get_current_datacollectiongroup_attribute, sqlalchemy_sessionmaker) + + +@pytest.fixture +def dummy_params(): + dummy_params = ThreeDGridScan( + **raw_params_from_file( + "tests/test_data/parameter_json_files/test_gridscan_param_defaults.json" + ) + ) + dummy_params.visit = "cm31105-5" + return dummy_params + + +@pytest.fixture +def dummy_ispyb(dummy_params) -> StoreInIspyb: + return StoreInIspyb(CONST.SIM.DEV_ISPYB_DATABASE_CFG) + + +@pytest.fixture +def dummy_ispyb_3d(dummy_params) -> StoreInIspyb: + return StoreInIspyb(CONST.SIM.DEV_ISPYB_DATABASE_CFG) + + +@pytest.fixture +def zocalo_env(): + os.environ["ZOCALO_CONFIG"] = "/dls_sw/apps/zocalo/live/configuration.yaml" diff --git a/tests/system_tests/hyperion/external_interaction/test_config_service.py b/tests/system_tests/hyperion/external_interaction/test_config_service.py new file mode 100644 index 000000000..47b32252c --- /dev/null +++ b/tests/system_tests/hyperion/external_interaction/test_config_service.py @@ -0,0 +1,55 @@ +from unittest.mock import MagicMock +from uuid import uuid4 + +import numpy as np +import pytest +from daq_config_server.client import ConfigServer + +from mx_bluesky.hyperion.external_interaction.config_server import config_server + + +@pytest.fixture +def config_service(): + return config_server() + + +@pytest.mark.s03 +def test_get_beamline_params(config_service: ConfigServer): + resp = config_service.get_beamline_param("miniap_x_SMALL_APERTURE") + assert isinstance(resp, float) + assert np.isclose(resp, 2.43) + + +@pytest.mark.s03 +def test_get_feature_flag(config_service: ConfigServer): + resp = config_service.get_feature_flag("set_stub_offsets") + assert isinstance(resp, bool) + + +@pytest.mark.s03 +def test_get_feature_flags(config_service: ConfigServer): + features = config_service.best_effort_get_all_feature_flags() + assert len(features.keys()) == 3 + + +@pytest.mark.s03 +def test_nonsense_feature_flag_fails_with_normal_call(config_service: ConfigServer): + with pytest.raises(AssertionError): + _ = config_service.get_feature_flag(str(uuid4())) + + +@pytest.mark.s03 +def test_best_effort_gracefully_fails_with_nonsense(config_service: ConfigServer): + resp = config_service.best_effort_get_feature_flag(str(uuid4())) + assert resp is None + + +@pytest.mark.s03 +def test_best_effort_gracefully_fails_if_service_down(config_service: ConfigServer): + log_mock = MagicMock() + config_service = ConfigServer("http://not_real_address", log_mock) + resp = config_service.best_effort_get_feature_flag("set_stub_offsets") + assert resp is None + log_mock.error.assert_called_with( + "Encountered an error reading from the config service.", exc_info=True + ) diff --git a/tests/system_tests/hyperion/external_interaction/test_exp_eye_dev.py b/tests/system_tests/hyperion/external_interaction/test_exp_eye_dev.py new file mode 100644 index 000000000..485705183 --- /dev/null +++ b/tests/system_tests/hyperion/external_interaction/test_exp_eye_dev.py @@ -0,0 +1,52 @@ +import os +from time import sleep + +import pytest +from requests import get + +from mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store import ( + ExpeyeInteraction, +) +from mx_bluesky.hyperion.parameters.constants import CONST + + +@pytest.mark.s03 +def test_start_and_end_robot_load(): + 
"""To confirm this test is successful go to + https://ispyb-test.diamond.ac.uk/dc/visit/cm37235-2 and see that data is added + when it's run. + """ + os.environ["ISPYB_CONFIG_PATH"] = CONST.SIM.DEV_ISPYB_DATABASE_CFG + + SAMPLE_ID = 5289780 + BARCODE = "test_barcode" + + expeye = ExpeyeInteraction() + + robot_action_id = expeye.start_load("cm37235", 2, SAMPLE_ID, 40, 3) + + sleep(0.5) + + print(f"Created {robot_action_id}") + + test_folder = "/dls/i03/data/2024/cm37235-2/xtal_snapshots" + oav_snapshot = test_folder + "/235855_load_after_0.0.png" + webcam_snapshot = test_folder + "/235855_webcam.jpg" + expeye.update_barcode_and_snapshots( + robot_action_id, BARCODE, oav_snapshot, webcam_snapshot + ) + + sleep(0.5) + + expeye.end_load(robot_action_id, "fail", "Oh no!") + + get_robot_data_url = f"{expeye.base_url}/robot-actions/{robot_action_id}" + response = get(get_robot_data_url, auth=expeye.auth) + + assert response.ok + + response = response.json() + assert response["robotActionId"] == robot_action_id + assert response["status"] == "ERROR" + assert response["sampleId"] == SAMPLE_ID + assert response["sampleBarcode"] == BARCODE diff --git a/tests/system_tests/hyperion/external_interaction/test_ispyb_dev_connection.py b/tests/system_tests/hyperion/external_interaction/test_ispyb_dev_connection.py new file mode 100644 index 000000000..a9429285d --- /dev/null +++ b/tests/system_tests/hyperion/external_interaction/test_ispyb_dev_connection.py @@ -0,0 +1,892 @@ +from __future__ import annotations + +import os +import re +from collections.abc import Callable, Sequence +from copy import deepcopy +from decimal import Decimal +from typing import Any, Literal +from unittest.mock import MagicMock, patch + +import numpy +import pytest +from bluesky.run_engine import RunEngine +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.synchrotron import SynchrotronMode +from ophyd.sim import NullStatus +from ophyd_async.core import AsyncStatus, set_mock_value + +from mx_bluesky.hyperion.experiment_plans import oav_grid_detection_plan +from mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan import ( + GridDetectThenXRayCentreComposite, + grid_detect_then_xray_centre, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationScanComposite, + rotation_scan, +) +from mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping import ( + populate_data_collection_group, + populate_remaining_data_collection_info, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback import ( + RotationISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_mapping import ( + construct_comment_for_gridscan, + populate_xy_data_collection_info, + populate_xz_data_collection_info, +) +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGridInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import Orientation +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.parameters.components import IspybExperimentType +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ( + GridScanWithEdgeDetect, + ThreeDGridScan, +) +from mx_bluesky.hyperion.parameters.rotation 
import RotationScan +from mx_bluesky.hyperion.utils.utils import convert_angstrom_to_eV + +from ....conftest import fake_read +from .conftest import raw_params_from_file + +EXPECTED_DATACOLLECTION_FOR_ROTATION = { + "wavelength": 0.71, + "beamSizeAtSampleX": 0.02, + "beamSizeAtSampleY": 0.02, + "exposureTime": 0.023, + "undulatorGap1": 1.12, + "synchrotronMode": SynchrotronMode.USER.value, + "slitGapHorizontal": 0.123, + "slitGapVertical": 0.234, +} + +# Map all the case-sensitive column names from their normalised versions +DATA_COLLECTION_COLUMN_MAP = { + s.lower(): s + for s in [ + "dataCollectionId", + "BLSAMPLEID", + "SESSIONID", + "experimenttype", + "dataCollectionNumber", + "startTime", + "endTime", + "runStatus", + "axisStart", + "axisEnd", + "axisRange", + "overlap", + "numberOfImages", + "startImageNumber", + "numberOfPasses", + "exposureTime", + "imageDirectory", + "imagePrefix", + "imageSuffix", + "imageContainerSubPath", + "fileTemplate", + "wavelength", + "resolution", + "detectorDistance", + "xBeam", + "yBeam", + "comments", + "printableForReport", + "CRYSTALCLASS", + "slitGapVertical", + "slitGapHorizontal", + "transmission", + "synchrotronMode", + "xtalSnapshotFullPath1", + "xtalSnapshotFullPath2", + "xtalSnapshotFullPath3", + "xtalSnapshotFullPath4", + "rotationAxis", + "phiStart", + "kappaStart", + "omegaStart", + "chiStart", + "resolutionAtCorner", + "detector2Theta", + "DETECTORMODE", + "undulatorGap1", + "undulatorGap2", + "undulatorGap3", + "beamSizeAtSampleX", + "beamSizeAtSampleY", + "centeringMethod", + "averageTemperature", + "ACTUALSAMPLEBARCODE", + "ACTUALSAMPLESLOTINCONTAINER", + "ACTUALCONTAINERBARCODE", + "ACTUALCONTAINERSLOTINSC", + "actualCenteringPosition", + "beamShape", + "dataCollectionGroupId", + "POSITIONID", + "detectorId", + "FOCALSPOTSIZEATSAMPLEX", + "POLARISATION", + "FOCALSPOTSIZEATSAMPLEY", + "APERTUREID", + "screeningOrigId", + "flux", + "strategySubWedgeOrigId", + "blSubSampleId", + "processedDataFile", + "datFullPath", + "magnification", + "totalAbsorbedDose", + "binning", + "particleDiameter", + "boxSize", + "minResolution", + "minDefocus", + "maxDefocus", + "defocusStepSize", + "amountAstigmatism", + "extractSize", + "bgRadius", + "voltage", + "objAperture", + "c1aperture", + "c2aperture", + "c3aperture", + "c1lens", + "c2lens", + "c3lens", + "startPositionId", + "endPositionId", + "flux", + "bestWilsonPlotPath", + "totalExposedDose", + "nominalMagnification", + "nominalDefocus", + "imageSizeX", + "imageSizeY", + "pixelSizeOnImage", + "phasePlate", + "dataCollectionPlanId", + ] +} + +GRID_INFO_COLUMN_MAP = { + s.lower(): s + for s in [ + "gridInfoId", + "dataCollectionGroupId", + "xOffset", + "yOffset", + "dx_mm", + "dy_mm", + "steps_x", + "steps_y", + "meshAngle", + "pixelsPerMicronX", + "pixelsPerMicronY", + "snapshot_offsetXPixel", + "snapshot_offsetYPixel", + "recordTimeStamp", + "orientation", + "workflowMeshId", + "snaked", + "dataCollectionId", + "patchesX", + "patchesY", + "micronsPerPixelX", + "micronsPerPixelY", + ] +} + + +@pytest.fixture +def dummy_data_collection_group_info(dummy_params): + return populate_data_collection_group( + dummy_params, + ) + + +@pytest.fixture +def dummy_scan_data_info_for_begin(dummy_params): + info = populate_xy_data_collection_info( + dummy_params.detector_params, + ) + info = populate_remaining_data_collection_info(None, None, info, dummy_params) + return ScanDataInfo( + data_collection_info=info, + ) + + +@pytest.fixture +def grid_detect_then_xray_centre_parameters(): + json_dict = 
raw_params_from_file( + "tests/test_data/parameter_json_files/ispyb_gridscan_system_test_parameters.json" + ) + return GridScanWithEdgeDetect(**json_dict) + + +# noinspection PyUnreachableCode +@pytest.fixture +def grid_detect_then_xray_centre_composite( + fast_grid_scan, + backlight, + smargon, + undulator, + synchrotron, + s4_slit_gaps, + attenuator, + xbpm_feedback, + detector_motion, + zocalo, + aperture_scatterguard, + zebra, + eiger, + robot, + oav, + dcm, + flux, + ophyd_pin_tip_detection, +): + composite = GridDetectThenXRayCentreComposite( + zebra_fast_grid_scan=fast_grid_scan, + pin_tip_detection=ophyd_pin_tip_detection, + backlight=backlight, + panda_fast_grid_scan=None, # type: ignore + smargon=smargon, + undulator=undulator, + synchrotron=synchrotron, + s4_slit_gaps=s4_slit_gaps, + attenuator=attenuator, + xbpm_feedback=xbpm_feedback, + detector_motion=detector_motion, + zocalo=zocalo, + aperture_scatterguard=aperture_scatterguard, + zebra=zebra, + eiger=eiger, + panda=None, # type: ignore + robot=robot, + oav=oav, + dcm=dcm, + flux=flux, + ) + eiger.odin.fan.consumers_connected.sim_put(True) + eiger.odin.fan.on.sim_put(True) + eiger.odin.meta.initialised.sim_put(True) + oav.zoom_controller.zrst.set("1.0x") + oav.cam.array_size.array_size_x.sim_put(1024) + oav.cam.array_size.array_size_y.sim_put(768) + oav.grid_snapshot.x_size.sim_put(1024) + oav.grid_snapshot.y_size.sim_put(768) + oav.grid_snapshot.top_left_x.set(50) + oav.grid_snapshot.top_left_y.set(100) + oav.grid_snapshot.box_width.set(0.1 * 1000 / 1.25) # size in pixels + set_mock_value(undulator.current_gap, 1.11) + + unpatched_method = oav.parameters.load_microns_per_pixel + eiger.stale_params.sim_put(0) + eiger.odin.meta.ready.sim_put(1) + eiger.odin.meta.active.sim_put(1) + eiger.odin.fan.ready.sim_put(1) + + unpatched_snapshot_trigger = oav.grid_snapshot.trigger + + def mock_snapshot_trigger(): + oav.grid_snapshot.last_path_full_overlay.set("test_1_y") + oav.grid_snapshot.last_path_outer.set("test_2_y") + oav.grid_snapshot.last_saved_path.set("test_3_y") + return unpatched_snapshot_trigger() + + def patch_lmpp(zoom, xsize, ysize): + unpatched_method(zoom, 1024, 768) + + def mock_pin_tip_detect(_): + tip_x_px = 100 + tip_y_px = 200 + microns_per_pixel = 2.87 # from zoom levels .xml + grid_width_px = int(400 / microns_per_pixel) + target_grid_height_px = 70 + top_edge_data = ([0] * tip_x_px) + ( + [(tip_y_px - target_grid_height_px // 2)] * grid_width_px + ) + bottom_edge_data = [0] * tip_x_px + [ + (tip_y_px + target_grid_height_px // 2) + ] * grid_width_px + set_mock_value( + ophyd_pin_tip_detection.triggered_top_edge, + numpy.array(top_edge_data, dtype=numpy.uint32), + ) + + set_mock_value( + ophyd_pin_tip_detection.triggered_bottom_edge, + numpy.array(bottom_edge_data, dtype=numpy.uint32), + ) + set_mock_value( + zocalo.bbox_sizes, numpy.array([[10, 10, 10]], dtype=numpy.uint64) + ) + + yield from [] + return tip_x_px, tip_y_px + + def mock_set_file_name(val, timeout): + eiger.odin.meta.file_name.sim_put(val) # type: ignore + eiger.odin.file_writer.id.sim_put(val) # type: ignore + return NullStatus() + + @AsyncStatus.wrap + async def mock_complete_status(): + pass + + with ( + patch.object(eiger.odin.nodes, "get_init_state", return_value=True), + patch.object(eiger, "wait_on_arming_if_started"), + # xsize, ysize will always be wrong since computed as 0 before we get here + # patch up load_microns_per_pixel connect to receive non-zero values + patch.object( + oav.parameters, + "load_microns_per_pixel", + 
new=MagicMock(side_effect=patch_lmpp), + ), + patch.object( + oav_grid_detection_plan, + "wait_for_tip_to_be_found", + side_effect=mock_pin_tip_detect, + ), + patch("dodal.devices.areadetector.plugins.MJPG.requests.get"), + patch("dodal.devices.areadetector.plugins.MJPG.Image.open"), + patch.object(oav.grid_snapshot, "post_processing"), + patch.object(oav.grid_snapshot, "trigger", side_effect=mock_snapshot_trigger), + patch.object( + eiger.odin.file_writer.file_name, + "set", + side_effect=mock_set_file_name, + ), + patch.object(fast_grid_scan, "kickoff", return_value=NullStatus()), + patch.object(fast_grid_scan, "complete", return_value=NullStatus()), + patch.object(zocalo, "trigger", return_value=NullStatus()), + ): + yield composite + + +def scan_xy_data_info_for_update( + data_collection_group_id, dummy_params: ThreeDGridScan, scan_data_info_for_begin +): + scan_data_info_for_update = deepcopy(scan_data_info_for_begin) + scan_data_info_for_update.data_collection_info.parent_id = data_collection_group_id + assert dummy_params is not None + scan_data_info_for_update.data_collection_grid_info = DataCollectionGridInfo( + dx_in_mm=dummy_params.x_step_size_um, + dy_in_mm=dummy_params.y_step_size_um, + steps_x=dummy_params.x_steps, + steps_y=dummy_params.y_steps, + microns_per_pixel_x=1.25, + microns_per_pixel_y=1.25, + # cast coordinates from numpy int64 to avoid mysql type conversion issues + snapshot_offset_x_pixel=100, + snapshot_offset_y_pixel=100, + orientation=Orientation.HORIZONTAL, + snaked=True, + ) + scan_data_info_for_update.data_collection_info.comments = ( + construct_comment_for_gridscan( + scan_data_info_for_update.data_collection_grid_info, + ) + ) + return scan_data_info_for_update + + +def scan_data_infos_for_update_3d( + ispyb_ids, scan_xy_data_info_for_update, dummy_params: ThreeDGridScan +): + xz_data_collection_info = populate_xz_data_collection_info( + dummy_params.detector_params + ) + + assert dummy_params.ispyb_params is not None + assert dummy_params is not None + data_collection_grid_info = DataCollectionGridInfo( + dx_in_mm=dummy_params.x_step_size_um, + dy_in_mm=dummy_params.z_step_size_um, + steps_x=dummy_params.x_steps, + steps_y=dummy_params.z_steps, + microns_per_pixel_x=1.25, + microns_per_pixel_y=1.25, + # cast coordinates from numpy int64 to avoid mysql type conversion issues + snapshot_offset_x_pixel=100, + snapshot_offset_y_pixel=50, + orientation=Orientation.HORIZONTAL, + snaked=True, + ) + xz_data_collection_info = populate_remaining_data_collection_info( + construct_comment_for_gridscan(data_collection_grid_info), + ispyb_ids.data_collection_group_id, + xz_data_collection_info, + dummy_params, + ) + xz_data_collection_info.parent_id = ispyb_ids.data_collection_group_id + + scan_xz_data_info_for_update = ScanDataInfo( + data_collection_info=xz_data_collection_info, + data_collection_grid_info=(data_collection_grid_info), + ) + return [scan_xy_data_info_for_update, scan_xz_data_info_for_update] + + +@pytest.fixture +def composite_for_rotation_scan(fake_create_rotation_devices: RotationScanComposite): + energy_ev = convert_angstrom_to_eV(0.71) + set_mock_value( + fake_create_rotation_devices.dcm.energy_in_kev.user_readback, + energy_ev / 1000, # pyright: ignore + ) + set_mock_value(fake_create_rotation_devices.undulator.current_gap, 1.12) # pyright: ignore + set_mock_value( + fake_create_rotation_devices.synchrotron.synchrotron_mode, + SynchrotronMode.USER, + ) + set_mock_value( + fake_create_rotation_devices.synchrotron.top_up_start_countdown, # 
pyright: ignore + -1, + ) + fake_create_rotation_devices.s4_slit_gaps.xgap.user_readback.sim_put( # pyright: ignore + 0.123 + ) + fake_create_rotation_devices.s4_slit_gaps.ygap.user_readback.sim_put( # pyright: ignore + 0.234 + ) + it_snapshot_filenames = iter( + [ + "/tmp/snapshot1.png", + "/tmp/snapshot2.png", + "/tmp/snapshot3.png", + "/tmp/snapshot4.png", + ] + ) + + with ( + patch("bluesky.preprocessors.__read_and_stash_a_motor", fake_read), + patch.object( + fake_create_rotation_devices.oav.snapshot.last_saved_path, "get" + ) as mock_last_saved_path, + patch("bluesky.plan_stubs.wait"), + ): + + @AsyncStatus.wrap + async def apply_snapshot_filename(): + mock_last_saved_path.return_value = next(it_snapshot_filenames) + + with patch.object( + fake_create_rotation_devices.oav.snapshot, + "trigger", + side_effect=apply_snapshot_filename, + ): + yield fake_create_rotation_devices + + +@pytest.fixture +def params_for_rotation_scan(test_rotation_params: RotationScan): + test_rotation_params.rotation_increment_deg = 0.27 + test_rotation_params.exposure_time_s = 0.023 + test_rotation_params.detector_params.expected_energy_ev = 0.71 + return test_rotation_params + + +@pytest.mark.s03 +def test_ispyb_get_comment_from_collection_correctly(fetch_comment: Callable[..., Any]): + expected_comment_contents = ( + "Xray centring - " + "Diffraction grid scan of 1 by 41 images, " + "Top left [454,-4], Bottom right [455,772]" + ) + + assert fetch_comment(8292317) == expected_comment_contents + + assert fetch_comment(2) == "" + + +@pytest.mark.s03 +def test_ispyb_deposition_comment_correct_on_failure( + dummy_ispyb: StoreInIspyb, + fetch_comment: Callable[..., Any], + dummy_data_collection_group_info, + dummy_scan_data_info_for_begin, +): + ispyb_ids = dummy_ispyb.begin_deposition( + dummy_data_collection_group_info, [dummy_scan_data_info_for_begin] + ) + dummy_ispyb.end_deposition(ispyb_ids, "fail", "could not connect to devices") + assert ( + fetch_comment(ispyb_ids.data_collection_ids[0]) # type: ignore + == "DataCollection Unsuccessful reason: could not connect to devices" + ) + + +@pytest.mark.s03 +def test_ispyb_deposition_comment_correct_for_3D_on_failure( + dummy_ispyb_3d: StoreInIspyb, + fetch_comment: Callable[..., Any], + dummy_params, + dummy_data_collection_group_info, + dummy_scan_data_info_for_begin, +): + ispyb_ids = dummy_ispyb_3d.begin_deposition( + dummy_data_collection_group_info, [dummy_scan_data_info_for_begin] + ) + scan_data_infos = generate_scan_data_infos( + dummy_params, + dummy_scan_data_info_for_begin, + IspybExperimentType.GRIDSCAN_3D, + ispyb_ids, + ) + ispyb_ids = dummy_ispyb_3d.update_deposition(ispyb_ids, scan_data_infos) + dcid1 = ispyb_ids.data_collection_ids[0] # type: ignore + dcid2 = ispyb_ids.data_collection_ids[1] # type: ignore + dummy_ispyb_3d.end_deposition(ispyb_ids, "fail", "could not connect to devices") + assert ( + fetch_comment(dcid1) + == "Hyperion: Xray centring - Diffraction grid scan of 40 by 20 images in 100.0 um by 100.0 um steps. Top left (px): [100,100], bottom right (px): [3300,1700]. DataCollection Unsuccessful reason: could not connect to devices" + ) + assert ( + fetch_comment(dcid2) + == "Hyperion: Xray centring - Diffraction grid scan of 40 by 10 images in 100.0 um by 100.0 um steps. Top left (px): [100,50], bottom right (px): [3300,850]. 
DataCollection Unsuccessful reason: could not connect to devices" + ) + + +@pytest.mark.s03 +@pytest.mark.parametrize( + "experiment_type, exp_num_of_grids, success", + [ + (IspybExperimentType.GRIDSCAN_2D, 1, False), + (IspybExperimentType.GRIDSCAN_2D, 1, True), + (IspybExperimentType.GRIDSCAN_3D, 2, False), + (IspybExperimentType.GRIDSCAN_3D, 2, True), + ], +) +def test_can_store_2D_ispyb_data_correctly_when_in_error( + experiment_type, + exp_num_of_grids: Literal[1, 2], + success: bool, + fetch_comment: Callable[..., Any], + dummy_params, + dummy_data_collection_group_info, + dummy_scan_data_info_for_begin, +): + ispyb: StoreInIspyb = StoreInIspyb(CONST.SIM.DEV_ISPYB_DATABASE_CFG) + ispyb_ids: IspybIds = ispyb.begin_deposition( + dummy_data_collection_group_info, [dummy_scan_data_info_for_begin] + ) + scan_data_infos = generate_scan_data_infos( + dummy_params, dummy_scan_data_info_for_begin, experiment_type, ispyb_ids + ) + + ispyb_ids = ispyb.update_deposition(ispyb_ids, scan_data_infos) + assert len(ispyb_ids.data_collection_ids) == exp_num_of_grids # type: ignore + assert len(ispyb_ids.grid_ids) == exp_num_of_grids # type: ignore + assert isinstance(ispyb_ids.data_collection_group_id, int) + + expected_comments = [ + ( + "Hyperion: Xray centring - Diffraction grid scan of 40 by 20 " + "images in 100.0 um by 100.0 um steps. Top left (px): [100,100], bottom right (px): [3300,1700]." + ), + ( + "Hyperion: Xray centring - Diffraction grid scan of 40 by 10 " + "images in 100.0 um by 100.0 um steps. Top left (px): [100,50], bottom right (px): [3300,850]." + ), + ] + + if success: + ispyb.end_deposition(ispyb_ids, "success", "") + else: + ispyb.end_deposition(ispyb_ids, "fail", "In error") + expected_comments = [ + e + " DataCollection Unsuccessful reason: In error" + for e in expected_comments + ] + + assert ( + not isinstance(ispyb_ids.data_collection_ids, int) + and ispyb_ids.data_collection_ids is not None + ) + for grid_no, dc_id in enumerate(ispyb_ids.data_collection_ids): + assert fetch_comment(dc_id) == expected_comments[grid_no] + + +@pytest.mark.s03 +def test_ispyb_deposition_in_gridscan( + RE: RunEngine, + grid_detect_then_xray_centre_composite: GridDetectThenXRayCentreComposite, + grid_detect_then_xray_centre_parameters: GridScanWithEdgeDetect, + fetch_datacollection_attribute: Callable[..., Any], + fetch_datacollection_grid_attribute: Callable[..., Any], + fetch_datacollection_position_attribute: Callable[..., Any], +): + os.environ["ISPYB_CONFIG_PATH"] = CONST.SIM.DEV_ISPYB_DATABASE_CFG + grid_detect_then_xray_centre_composite.s4_slit_gaps.xgap.user_readback.sim_put(0.1) # type: ignore + grid_detect_then_xray_centre_composite.s4_slit_gaps.ygap.user_readback.sim_put(0.1) # type: ignore + ispyb_callback = GridscanISPyBCallback() + RE.subscribe(ispyb_callback) + RE( + grid_detect_then_xray_centre( + grid_detect_then_xray_centre_composite, + grid_detect_then_xray_centre_parameters, + ) + ) + + ispyb_ids = ispyb_callback.ispyb_ids + DC_EXPECTED_VALUES = { + "detectorid": 78, + "axisstart": 0.0, + "axisrange": 0, + "axisend": 0, + "focalspotsizeatsamplex": 0.02, + "focalspotsizeatsampley": 0.02, + "slitgapvertical": 0.1, + "slitgaphorizontal": 0.1, + "beamsizeatsamplex": 0.02, + "beamsizeatsampley": 0.02, + "transmission": 49.118, + "datacollectionnumber": 1, + "detectordistance": 100.0, + "exposuretime": 0.12, + "imagedirectory": "/tmp/", + "imageprefix": "file_name", + "imagesuffix": "h5", + "numberofpasses": 1, + "overlap": 0, + "omegastart": 0, + "startimagenumber": 1, + 
"wavelength": 0.976254, + "xbeam": 150.0, + "ybeam": 160.0, + "xtalsnapshotfullpath1": "test_1_y", + "xtalsnapshotfullpath2": "test_2_y", + "xtalsnapshotfullpath3": "test_3_y", + "synchrotronmode": "User", + "undulatorgap1": 1.11, + "filetemplate": "file_name_1_master.h5", + "numberofimages": 20 * 12, + } + compare_comment( + fetch_datacollection_attribute, + ispyb_ids.data_collection_ids[0], + "Hyperion: Xray centring - Diffraction grid scan of 20 by 12 " + "images in 20.0 um by 20.0 um steps. Top left (px): [100,161], " + "bottom right (px): [239,244]. Aperture: Small. ", + ) + compare_actual_and_expected( + ispyb_ids.data_collection_ids[0], + DC_EXPECTED_VALUES, + fetch_datacollection_attribute, + DATA_COLLECTION_COLUMN_MAP, + ) + GRIDINFO_EXPECTED_VALUES = { + "gridInfoId": ispyb_ids.grid_ids[0], + "dx_mm": 0.02, + "dy_mm": 0.02, + "steps_x": 20, + "steps_y": 12, + "snapshot_offsetXPixel": 100, + "snapshot_offsetYPixel": 161, + "orientation": "horizontal", + "snaked": True, + "dataCollectionId": ispyb_ids.data_collection_ids[0], + "micronsPerPixelX": 2.87, + "micronsPerPixelY": 2.87, + } + + compare_actual_and_expected( + ispyb_ids.grid_ids[0], + GRIDINFO_EXPECTED_VALUES, + fetch_datacollection_grid_attribute, + GRID_INFO_COLUMN_MAP, + ) + position_id = fetch_datacollection_attribute( + ispyb_ids.data_collection_ids[0], DATA_COLLECTION_COLUMN_MAP["positionid"] + ) + assert position_id is None + DC_EXPECTED_VALUES.update( + { + "axisstart": 90.0, + "axisend": 90.0, + "datacollectionnumber": 2, + "omegastart": 90.0, + "filetemplate": "file_name_2_master.h5", + "numberofimages": 220, + } + ) + compare_actual_and_expected( + ispyb_ids.data_collection_ids[1], + DC_EXPECTED_VALUES, + fetch_datacollection_attribute, + DATA_COLLECTION_COLUMN_MAP, + ) + compare_comment( + fetch_datacollection_attribute, + ispyb_ids.data_collection_ids[1], + "Hyperion: Xray centring - Diffraction grid scan of 20 by 11 " + "images in 20.0 um by 20.0 um steps. Top left (px): [100,165], " + "bottom right (px): [239,241]. Aperture: Small. ", + ) + position_id = fetch_datacollection_attribute( + ispyb_ids.data_collection_ids[1], DATA_COLLECTION_COLUMN_MAP["positionid"] + ) + assert position_id is None + GRIDINFO_EXPECTED_VALUES.update( + { + "gridInfoId": ispyb_ids.grid_ids[1], + "steps_y": 11.0, + "snapshot_offsetYPixel": 165.0, + "dataCollectionId": ispyb_ids.data_collection_ids[1], + } + ) + compare_actual_and_expected( + ispyb_ids.grid_ids[1], + GRIDINFO_EXPECTED_VALUES, + fetch_datacollection_grid_attribute, + GRID_INFO_COLUMN_MAP, + ) + + +@pytest.mark.s03 +def test_ispyb_deposition_in_rotation_plan( + composite_for_rotation_scan: RotationScanComposite, + params_for_rotation_scan: RotationScan, + oav_parameters_for_rotation: OAVParameters, + RE: RunEngine, + fetch_comment: Callable[..., Any], + fetch_datacollection_attribute: Callable[..., Any], + fetch_datacollectiongroup_attribute: Callable[..., Any], + fetch_datacollection_position_attribute: Callable[..., Any], +): + os.environ["ISPYB_CONFIG_PATH"] = CONST.SIM.DEV_ISPYB_DATABASE_CFG + ispyb_cb = RotationISPyBCallback() + RE.subscribe(ispyb_cb) + + RE( + rotation_scan( + composite_for_rotation_scan, + params_for_rotation_scan, + oav_parameters_for_rotation, + ) + ) + + dcid = ispyb_cb.ispyb_ids.data_collection_ids[0] + assert dcid is not None + assert ( + fetch_comment(dcid) + == "Sample position (µm): (1000, 2000, 3000) test Aperture: Small. 
" + ) + + expected_values = EXPECTED_DATACOLLECTION_FOR_ROTATION | { + "xtalSnapshotFullPath1": "/tmp/snapshot1.png", + "xtalSnapshotFullPath2": "/tmp/snapshot2.png", + "xtalSnapshotFullPath3": "/tmp/snapshot3.png", + "xtalSnapshotFullPath4": "/tmp/snapshot4.png", + } + + compare_actual_and_expected(dcid, expected_values, fetch_datacollection_attribute) + + position_id = fetch_datacollection_attribute( + dcid, DATA_COLLECTION_COLUMN_MAP["positionid"] + ) + expected_values = {"posX": 1.0, "posY": 2.0, "posZ": 3.0} + compare_actual_and_expected( + position_id, expected_values, fetch_datacollection_position_attribute + ) + + +@pytest.mark.s03 +def test_ispyb_deposition_in_rotation_plan_snapshots_in_parameters( + composite_for_rotation_scan: RotationScanComposite, + params_for_rotation_scan: RotationScan, + oav_parameters_for_rotation: OAVParameters, + RE: RunEngine, + fetch_datacollection_attribute: Callable[..., Any], +): + os.environ["ISPYB_CONFIG_PATH"] = CONST.SIM.DEV_ISPYB_DATABASE_CFG + ispyb_cb = RotationISPyBCallback() + RE.subscribe(ispyb_cb) + params_for_rotation_scan.snapshot_omegas_deg = None + params_for_rotation_scan.ispyb_extras.xtal_snapshots_omega_start = [ # type: ignore + "/tmp/test_snapshot1.png", + "/tmp/test_snapshot2.png", + "/tmp/test_snapshot3.png", + ] + RE( + rotation_scan( + composite_for_rotation_scan, + params_for_rotation_scan, + oav_parameters_for_rotation, + ) + ) + + dcid = ispyb_cb.ispyb_ids.data_collection_ids[0] + assert dcid is not None + expected_values = EXPECTED_DATACOLLECTION_FOR_ROTATION | { + "xtalSnapshotFullPath1": "/tmp/test_snapshot1.png", + "xtalSnapshotFullPath2": "/tmp/test_snapshot2.png", + "xtalSnapshotFullPath3": "/tmp/test_snapshot3.png", + } + + compare_actual_and_expected(dcid, expected_values, fetch_datacollection_attribute) + + +def generate_scan_data_infos( + dummy_params, + dummy_scan_data_info_for_begin: ScanDataInfo, + experiment_type: IspybExperimentType, + ispyb_ids: IspybIds, +) -> Sequence[ScanDataInfo]: + xy_scan_data_info = scan_xy_data_info_for_update( + ispyb_ids.data_collection_group_id, dummy_params, dummy_scan_data_info_for_begin + ) + xy_scan_data_info.data_collection_id = ispyb_ids.data_collection_ids[0] + if experiment_type == IspybExperimentType.GRIDSCAN_3D: + scan_data_infos = scan_data_infos_for_update_3d( + ispyb_ids, xy_scan_data_info, dummy_params + ) + else: + scan_data_infos = [xy_scan_data_info] + return scan_data_infos + + +def compare_actual_and_expected( + id, expected_values, fetch_datacollection_attribute, column_map: dict | None = None +): + results = "\n" + for k, v in expected_values.items(): + actual = fetch_datacollection_attribute( + id, column_map[k.lower()] if column_map else k + ) + if isinstance(actual, Decimal): + actual = float(actual) + if isinstance(v, float): + actual_v = actual == pytest.approx(v) + else: + actual_v = actual == v + if not actual_v: + results += f"expected {k} {v} == {actual}\n" + assert results == "\n", results + + +def compare_comment( + fetch_datacollection_attribute, data_collection_id, expected_comment +): + actual_comment = fetch_datacollection_attribute( + data_collection_id, DATA_COLLECTION_COLUMN_MAP["comments"] + ) + match = re.search(" Zocalo processing took", actual_comment) + truncated_comment = actual_comment[: match.start()] if match else actual_comment + assert truncated_comment == expected_comment diff --git a/tests/system_tests/hyperion/external_interaction/test_nexgen.py b/tests/system_tests/hyperion/external_interaction/test_nexgen.py new file mode 
100644 index 000000000..3d6247fa0 --- /dev/null +++ b/tests/system_tests/hyperion/external_interaction/test_nexgen.py @@ -0,0 +1,173 @@ +import re +import subprocess +from os import environ +from unittest.mock import patch + +import bluesky.preprocessors as bpp +import pytest +from bluesky.run_engine import RunEngine + +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationScanComposite, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback import ( + RotationNexusFileCallback, +) +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import RotationScan + +from ....conftest import extract_metafile, raw_params_from_file + +DOCKER = environ.get("DOCKER", "docker") + + +@pytest.fixture +def test_params(tmpdir): + param_dict = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + params = RotationScan(**param_dict) + params.demand_energy_ev = 12700 + params.scan_width_deg = 360 + params.storage_directory = "tests/test_data" + params.x_start_um = 0 + params.y_start_um = 0 + params.z_start_um = 0 + params.exposure_time_s = 0.004 + return params + + +@pytest.mark.parametrize( + "test_data_directory, prefix, reference_file", + [ + ( + "tests/test_data/nexus_files/rotation", + "ins_8_5", + "ins_8_5_expected_output.txt", + ), + ( + "tests/test_data/nexus_files/rotation_unicode_metafile", + "ins_8_5", + "ins_8_5_expected_output.txt", + ), + ], +) +@pytest.mark.s03 +def test_rotation_nexgen( + test_params: RotationScan, + tmpdir, + fake_create_rotation_devices: RotationScanComposite, + test_data_directory, + prefix, + reference_file, +): + meta_file = f"{prefix}_meta.h5.gz" + test_params.file_name = prefix + test_params.storage_directory = f"{tmpdir}" + run_number = test_params.detector_params.run_number + + extract_metafile( + f"{test_data_directory}/{meta_file}", f"{tmpdir}/{prefix}_{run_number}_meta.h5" + ) + + fake_create_rotation_devices.eiger.bit_depth.sim_put(32) # type: ignore + + RE = RunEngine({}) + + with patch( + "mx_bluesky.hyperion.external_interaction.nexus.write_nexus.get_start_and_predicted_end_time", + return_value=("test_time", "test_time"), + ): + RE( + _fake_rotation_scan( + test_params, RotationNexusFileCallback(), fake_create_rotation_devices + ) + ) + + master_file = f"{tmpdir}/{prefix}_{run_number}_master.h5" + _check_nexgen_output_passes_imginfo( + master_file, f"{test_data_directory}/{reference_file}" + ) + + +FILE_PATTERN = re.compile("^ ################# File = (.*)") + +HEADER_PATTERN = re.compile("^ ===== Header information:") + +DATE_PATTERN = re.compile("^ date = (.*)") + + +def _check_nexgen_output_passes_imginfo(test_file, reference_file): + stdout, stderr = _run_imginfo(test_file) + assert stderr == "" + it_actual_lines = iter(stdout.split("\n")) + i = 0 + try: + with open(reference_file) as f: + while True: + i += 1 + expected_line = f.readline().rstrip("\n") + actual_line = next(it_actual_lines) + if FILE_PATTERN.match(actual_line): + continue + if HEADER_PATTERN.match(actual_line): + break + assert ( + actual_line == expected_line + ), f"Header line {i} didn't match contents of {reference_file}: {actual_line} <-> {expected_line}" + + while True: + i += 1 + expected_line = f.readline().rstrip("\n") + actual_line = next(it_actual_lines) + if DATE_PATTERN.match(actual_line): + continue + 
assert ( + actual_line == expected_line + ), f"Header line {i} didn't match contents of {reference_file}: {actual_line} <-> {expected_line}" + + except StopIteration: + pass + + # assert stdout == expected + + +def _run_imginfo(filename): + process = subprocess.run( + ["utility_scripts/run_imginfo.sh", filename], text=True, capture_output=True + ) + assert process.returncode != 2, "imginfo is not available" + assert ( + process.returncode == 0 + ), f"imginfo failed with returncode {process.returncode}" + + return process.stdout, process.stderr + + +def _fake_rotation_scan( + parameters: RotationScan, + subscription: RotationNexusFileCallback, + rotation_devices: RotationScanComposite, +): + @bpp.subs_decorator(subscription) + @bpp.set_run_key_decorator("rotation_scan_with_cleanup_and_subs") + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.ROTATION_OUTER, + "hyperion_parameters": parameters.json(), + "activate_callbacks": "RotationNexusFileCallback", + } + ) + def plan(): + yield from read_hardware_during_collection( + rotation_devices.aperture_scatterguard, + rotation_devices.attenuator, + rotation_devices.flux, + rotation_devices.dcm, + rotation_devices.eiger, + ) + + return plan() diff --git a/tests/system_tests/hyperion/external_interaction/test_zocalo_system.py b/tests/system_tests/hyperion/external_interaction/test_zocalo_system.py new file mode 100644 index 000000000..e96f17de0 --- /dev/null +++ b/tests/system_tests/hyperion/external_interaction/test_zocalo_system.py @@ -0,0 +1,158 @@ +import re + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +import numpy as np +import pytest +import pytest_asyncio +from bluesky.run_engine import RunEngine +from dodal.devices.zocalo import ZOCALO_READING_PLAN_NAME, ZocaloResults + +from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import ( + create_gridscan_callbacks, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + ispyb_activation_wrapper, +) +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan +from tests.conftest import create_dummy_scan_spec + +""" +If fake-zocalo system tests are failing, check that the RMQ instance is set up right: + +- Open the RMQ webpage specified when you start the fake zocalo and login with the +provided credentials + +- go to the admin panel and under the 'exchanges' tab ensure that there is a 'results' +exchange for the zocalo vhost (other settings can be left blank) + +- go to the 'queues and streams' tab and add a binding for the xrc.i03 queue to the +results exchange, with the routing key 'xrc.i03' + +- make sure that there are no un-acked/un-delivered messages on the i03.xrc queue +""" + + +@pytest_asyncio.fixture +async def zocalo_device(): + zd = ZocaloResults() + zd.timeout_s = 10 + await zd.connect() + return zd + + +@bpp.set_run_key_decorator("testing125") +@bpp.run_decorator( + md={ + "subplan_name": CONST.PLAN.DO_FGS, + "zocalo_environment": "dev_artemis", + "scan_points": create_dummy_scan_spec(10, 20, 30), + } +) +def fake_fgs_plan(): + yield from bps.sleep(0) + + +@pytest.fixture +def run_zocalo_with_dev_ispyb( + dummy_params: ThreeDGridScan, + dummy_ispyb_3d, + RE: RunEngine, + zocalo_device: ZocaloResults, +): + async def inner(sample_name="", fallback=np.array([0, 0, 0])): + dummy_params.file_name = sample_name + _, ispyb_callback = create_gridscan_callbacks() + 
ispyb_callback.ispyb_config = dummy_ispyb_3d.ISPYB_CONFIG_PATH + RE.subscribe(ispyb_callback) + + @bpp.set_run_key_decorator("testing123") + def trigger_zocalo_after_fast_grid_scan(): + @bpp.set_run_key_decorator("testing124") + @bpp.stage_decorator([zocalo_device]) + @bpp.run_decorator( + md={ + "subplan_name": CONST.PLAN.GRIDSCAN_OUTER, + CONST.TRIGGER.ZOCALO: CONST.PLAN.DO_FGS, + "hyperion_parameters": dummy_params.json(), + } + ) + def inner_plan(): + yield from fake_fgs_plan() + yield from bps.trigger_and_read( + [zocalo_device], name=ZOCALO_READING_PLAN_NAME + ) + + yield from inner_plan() + + RE( + ispyb_activation_wrapper( + trigger_zocalo_after_fast_grid_scan(), dummy_params + ) + ) + centre = await zocalo_device.centres_of_mass.get_value() + if centre.size == 0: + centre = fallback + else: + centre = centre[0] + + return ispyb_callback, ispyb_callback.emit_cb, centre + + return inner + + +@pytest.mark.s03 +async def test_given_a_result_with_no_diffraction_when_zocalo_called_then_move_to_fallback( + run_zocalo_with_dev_ispyb, zocalo_env +): + fallback = np.array([1, 2, 3]) + _, _, centre = await run_zocalo_with_dev_ispyb("NO_DIFF", fallback) + assert np.allclose(centre, fallback) + + +@pytest.mark.s03 +async def test_given_a_result_with_no_diffraction_ispyb_comment_updated( + run_zocalo_with_dev_ispyb, zocalo_env, fetch_comment +): + ispyb, zc, _ = await run_zocalo_with_dev_ispyb("NO_DIFF") + + comment = fetch_comment(ispyb.ispyb_ids.data_collection_ids[0]) + assert "Zocalo found no crystals in this gridscan." in comment + + +@pytest.mark.s03 +async def test_zocalo_adds_nonzero_comment_time( + run_zocalo_with_dev_ispyb, zocalo_env, fetch_comment +): + ispyb, zc, _ = await run_zocalo_with_dev_ispyb() + + comment = fetch_comment(ispyb.ispyb_ids.data_collection_ids[0]) + match = re.match(r"Zocalo processing took (\d+\.\d+) s", comment) + assert match + time_s = float(match.group(1)) + assert time_s > 0 + assert time_s < 180 + + +@pytest.mark.s03 +async def test_given_a_single_crystal_result_ispyb_comment_updated( + run_zocalo_with_dev_ispyb, zocalo_env, fetch_comment +): + ispyb, zc, _ = await run_zocalo_with_dev_ispyb() + comment = fetch_comment(ispyb.ispyb_ids.data_collection_ids[0]) + assert "Crystal 1" in comment + assert "Strength" in comment + assert "Size (grid boxes)" in comment + + +@pytest.mark.s03 +async def test_given_a_result_with_multiple_crystals_ispyb_comment_updated( + run_zocalo_with_dev_ispyb, zocalo_env, fetch_comment +): + ispyb, zc, _ = await run_zocalo_with_dev_ispyb("MULTI_X") + + comment = fetch_comment(ispyb.ispyb_ids.data_collection_ids[0]) + assert "Crystal 1" and "Crystal 2" in comment + assert "Strength" in comment + assert "Position (grid boxes)" in comment diff --git a/tests/system_tests/hyperion/test_aperturescatterguard_system.py b/tests/system_tests/hyperion/test_aperturescatterguard_system.py new file mode 100644 index 000000000..6e51bb774 --- /dev/null +++ b/tests/system_tests/hyperion/test_aperturescatterguard_system.py @@ -0,0 +1,42 @@ +import pytest +from dodal.common.beamlines.beamline_parameters import ( + BEAMLINE_PARAMETER_PATHS, + GDABeamlineParameters, +) +from dodal.devices.aperturescatterguard import ( + ApertureScatterguard, + load_positions_from_beamline_parameters, + load_tolerances_from_beamline_params, +) +from ophyd_async.core import DeviceCollector + + +@pytest.fixture +def ap_sg(): + params = GDABeamlineParameters.from_file(BEAMLINE_PARAMETER_PATHS["i03"]) + with DeviceCollector(): + ap_sg = ApertureScatterguard( + 
prefix="BL03S", + name="ap_sg", + loaded_positions=load_positions_from_beamline_parameters(params), + tolerances=load_tolerances_from_beamline_params(params), + ) + return ap_sg + + +@pytest.mark.s03() +def test_aperture_change_callback(ap_sg: ApertureScatterguard): + from bluesky.run_engine import RunEngine + + from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import ( + set_aperture_for_bbox_size, + ) + from mx_bluesky.hyperion.external_interaction.callbacks.aperture_change_callback import ( + ApertureChangeCallback, + ) + + cb = ApertureChangeCallback() + RE = RunEngine({}) + RE.subscribe(cb) + RE(set_aperture_for_bbox_size(ap_sg, [2, 2, 2])) + assert cb.last_selected_aperture == "LARGE_APERTURE" diff --git a/tests/system_tests/hyperion/test_device_setups_and_cleanups.py b/tests/system_tests/hyperion/test_device_setups_and_cleanups.py new file mode 100644 index 000000000..cbec7448b --- /dev/null +++ b/tests/system_tests/hyperion/test_device_setups_and_cleanups.py @@ -0,0 +1,47 @@ +import pytest +from bluesky.run_engine import RunEngine +from dodal.devices.zebra import ( + IN3_TTL, + IN4_TTL, + OR1, + PC_PULSE, + TTL_DETECTOR, + TTL_SHUTTER, + I03Axes, + Zebra, +) + +from mx_bluesky.hyperion.device_setup_plans.setup_zebra import ( + set_zebra_shutter_to_manual, + setup_zebra_for_gridscan, + setup_zebra_for_rotation, +) + + +@pytest.fixture +async def connected_zebra(): + RunEngine() + zebra = Zebra(name="zebra", prefix="BL03S-EA-ZEBRA-01:") + await zebra.connect() + return zebra + + +@pytest.mark.s03 +async def test_zebra_set_up_for_gridscan(RE, connected_zebra: Zebra): + RE(setup_zebra_for_gridscan(connected_zebra, wait=True)) + assert await connected_zebra.output.out_pvs[TTL_DETECTOR].get_value() == IN3_TTL + assert await connected_zebra.output.out_pvs[TTL_SHUTTER].get_value() == IN4_TTL + + +@pytest.mark.s03 +async def test_zebra_set_up_for_rotation(RE, connected_zebra: Zebra): + RE(setup_zebra_for_rotation(connected_zebra, wait=True)) + assert await connected_zebra.pc.gate_trigger.get_value() == I03Axes.OMEGA.value + assert await connected_zebra.pc.gate_width.get_value() == pytest.approx(360, 0.01) + + +@pytest.mark.s03 +async def test_zebra_cleanup(RE, connected_zebra: Zebra): + RE(set_zebra_shutter_to_manual(connected_zebra, wait=True)) + assert await connected_zebra.output.out_pvs[TTL_DETECTOR].get_value() == PC_PULSE + assert await connected_zebra.output.out_pvs[TTL_SHUTTER].get_value() == OR1 diff --git a/tests/test_data/nexus_files/README.md b/tests/test_data/nexus_files/README.md new file mode 100644 index 000000000..ab7b7b4ac --- /dev/null +++ b/tests/test_data/nexus_files/README.md @@ -0,0 +1,12 @@ +Test files for nexus reading +--- +This folder contains test files for the test_nexgen system test. + +Each folder contains: +* A text file with the expected output from `imginfo` +* A gzipped meta file that has been run through the `utility_scripts/strip_metafile.py` program. +* A reference .nxs nexus file that represents expected output for unit tests such as + `test_write_rotation_nexus` in combination with the unzipped metafile + +The metafiles have the `/mask` and `/flatfield` datasets compressed and removed respectively. +The file is then gzipped to compress it further. 
diff --git a/tests/test_data/nexus_files/fake_data.h5 b/tests/test_data/nexus_files/fake_data.h5 new file mode 100644 index 000000000..afd7b3ac4 Binary files /dev/null and b/tests/test_data/nexus_files/fake_data.h5 differ diff --git a/tests/test_data/nexus_files/rotation/ins_8_5.nxs b/tests/test_data/nexus_files/rotation/ins_8_5.nxs new file mode 100644 index 000000000..d419919cd Binary files /dev/null and b/tests/test_data/nexus_files/rotation/ins_8_5.nxs differ diff --git a/tests/test_data/nexus_files/rotation/ins_8_5_expected_output.txt b/tests/test_data/nexus_files/rotation/ins_8_5_expected_output.txt new file mode 100644 index 000000000..fa3d339ad --- /dev/null +++ b/tests/test_data/nexus_files/rotation/ins_8_5_expected_output.txt @@ -0,0 +1,68 @@ + + ========================================================================== + + Copyright 2007, 2024 Global Phasing Ltd + + Author (2007-2024) Clemens Vonrhein, Claus Flensburg, Thomas Womack, Gerard Bricogne + + + Contains HDF5 library: + + HDF5 (Hierarchical Data Format 5) Software Library and Utilities + Copyright 2006-2015 by The HDF Group. + + NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities + Copyright 1998-2006 by the Board of Trustees of the University of Illinois. + + Contains also: + + Bitshuffle - Filter for improving compression of typed binary data. + Copyright (c) 2014 Kiyoshi Masui (kiyo@physics.ubc.ca) + + LZ4 - Fast LZ compression algorithm. + Copyright (C) 2011-2012, Yann Collet. + + LZ4/HDF5 FILTER IMPLEMENTATION. + Copyright (C) 2011-2013, Dectris Ltd. + + HDF5 filter plugin adopted from PyTables. + Copyright (c) 2002-2004 by Francesc Alted + Copyright (c) 2005-2007 by Carabos Coop. V. + Copyright (c) 2008-2010 by Francesc Alted + + For full information, please run with "-fullinfo" flag. + + ========================================================================== + + + + ################# File = /data/ins_8_5_0_master.h5 + + >>> Image format detected as HDF5/Eiger + rotation axis = "OMEGA" + + Sweep-1 : + from image 1 : Omega= 0.000 .. 0.100 Kappa= 0.000 .. 0.000 Chi= 23.850 .. 23.850 Phi= 0.470 .. 0.470 2-Theta= 0.000 .. 0.000 + to image 3600 : Omega= 359.900 .. 360.000 Kappa= 0.000 .. 0.000 Chi= 23.850 .. 23.850 Phi= 0.470 .. 0.470 2-Theta= 0.000 .. 
0.000 + Image number 1/3600 + + + ===== Header information: + exposure time [seconds] = 0.00400 + distance [mm] = 100.000 + wavelength [A] = 0.976254 + sensor thickness [mm] = 0.450 + sensor material = Si + Phi-angle [degree] = 0.00000 + Omega-angle (start, end) [degree] = 0.00000 0.10000 + Oscillation-angle in Omega [degree] = 0.10000 + Chi-angle [degree] = 23.85000 + Pixel size in X [mm] = 0.075000 + Pixel size in Y [mm] = 0.075000 + Number of pixels in X = 4148 + Number of pixels in Y = 4362 + Beam centre in X [mm] = 150.000 + Beam centre in X [pixel] = 2000.000 + Beam centre in Y [mm] = 160.000 + Beam centre in Y [pixel] = 2133.333 + Overload value = 46051 diff --git a/tests/test_data/nexus_files/rotation/ins_8_5_meta.h5.gz b/tests/test_data/nexus_files/rotation/ins_8_5_meta.h5.gz new file mode 100644 index 000000000..3ec379dbc Binary files /dev/null and b/tests/test_data/nexus_files/rotation/ins_8_5_meta.h5.gz differ diff --git a/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5.nxs b/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5.nxs new file mode 100644 index 000000000..d419919cd Binary files /dev/null and b/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5.nxs differ diff --git a/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5_expected_output.txt b/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5_expected_output.txt new file mode 100644 index 000000000..fa3d339ad --- /dev/null +++ b/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5_expected_output.txt @@ -0,0 +1,68 @@ + + ========================================================================== + + Copyright 2007, 2024 Global Phasing Ltd + + Author (2007-2024) Clemens Vonrhein, Claus Flensburg, Thomas Womack, Gerard Bricogne + + + Contains HDF5 library: + + HDF5 (Hierarchical Data Format 5) Software Library and Utilities + Copyright 2006-2015 by The HDF Group. + + NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities + Copyright 1998-2006 by the Board of Trustees of the University of Illinois. + + Contains also: + + Bitshuffle - Filter for improving compression of typed binary data. + Copyright (c) 2014 Kiyoshi Masui (kiyo@physics.ubc.ca) + + LZ4 - Fast LZ compression algorithm. + Copyright (C) 2011-2012, Yann Collet. + + LZ4/HDF5 FILTER IMPLEMENTATION. + Copyright (C) 2011-2013, Dectris Ltd. + + HDF5 filter plugin adopted from PyTables. + Copyright (c) 2002-2004 by Francesc Alted + Copyright (c) 2005-2007 by Carabos Coop. V. + Copyright (c) 2008-2010 by Francesc Alted + + For full information, please run with "-fullinfo" flag. + + ========================================================================== + + + + ################# File = /data/ins_8_5_0_master.h5 + + >>> Image format detected as HDF5/Eiger + rotation axis = "OMEGA" + + Sweep-1 : + from image 1 : Omega= 0.000 .. 0.100 Kappa= 0.000 .. 0.000 Chi= 23.850 .. 23.850 Phi= 0.470 .. 0.470 2-Theta= 0.000 .. 0.000 + to image 3600 : Omega= 359.900 .. 360.000 Kappa= 0.000 .. 0.000 Chi= 23.850 .. 23.850 Phi= 0.470 .. 0.470 2-Theta= 0.000 .. 
0.000 + Image number 1/3600 + + + ===== Header information: + exposure time [seconds] = 0.00400 + distance [mm] = 100.000 + wavelength [A] = 0.976254 + sensor thickness [mm] = 0.450 + sensor material = Si + Phi-angle [degree] = 0.00000 + Omega-angle (start, end) [degree] = 0.00000 0.10000 + Oscillation-angle in Omega [degree] = 0.10000 + Chi-angle [degree] = 23.85000 + Pixel size in X [mm] = 0.075000 + Pixel size in Y [mm] = 0.075000 + Number of pixels in X = 4148 + Number of pixels in Y = 4362 + Beam centre in X [mm] = 150.000 + Beam centre in X [pixel] = 2000.000 + Beam centre in Y [mm] = 160.000 + Beam centre in Y [pixel] = 2133.333 + Overload value = 46051 diff --git a/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5_meta.h5.gz b/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5_meta.h5.gz new file mode 100644 index 000000000..070b64f62 Binary files /dev/null and b/tests/test_data/nexus_files/rotation_unicode_metafile/ins_8_5_meta.h5.gz differ diff --git a/tests/test_data/parameter_json_files/good_test_grid_with_edge_detect_parameters.json b/tests/test_data/parameter_json_files/good_test_grid_with_edge_detect_parameters.json new file mode 100644 index 000000000..9b5a16ee5 --- /dev/null +++ b/tests/test_data/parameter_json_files/good_test_grid_with_edge_detect_parameters.json @@ -0,0 +1,19 @@ +{ + "parameter_model_version": "5.0.0", + "beamline": "BL03S", + "insertion_prefix": "SR03S", + "zocalo_environment": "devrmq", + "storage_directory": "/tmp", + "file_name": "file_name", + "run_number": 0, + "sample_id": 123456, + "use_roi_mode": false, + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "exposure_time_s": 0.1, + "detector_distance_mm": 100.0, + "omega_start_deg": 0.0, + "grid_width_um": 290.6, + "oav_centring_file": "tests/test_data/test_OAVCentring.json", + "transmission_frac": 1.0, + "visit": "cm31105-4" +} diff --git a/tests/test_data/parameter_json_files/good_test_multi_rotation_scan_parameters.json b/tests/test_data/parameter_json_files/good_test_multi_rotation_scan_parameters.json new file mode 100644 index 000000000..43d9512e6 --- /dev/null +++ b/tests/test_data/parameter_json_files/good_test_multi_rotation_scan_parameters.json @@ -0,0 +1,50 @@ +{ + "parameter_model_version": "5.0.0", + "comment": "test", + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "storage_directory": "/tmp/dls/i03/data/2024/cm31105-4/auto/123456/", + "detector_distance_mm": 100.0, + "demand_energy_ev": 100, + "exposure_time_s": 0.1, + "insertion_prefix": "SR03S", + "file_name": "file_name", + "run_number": 0, + "sample_id": 123456, + "shutter_opening_time_s": 0.6, + "visit": "cm31105-4", + "zocalo_environment": "dev_artemis", + "transmission_frac": 0.1, + "rotation_increment_deg": 0.1, + "selected_aperture": "SMALL_APERTURE", + "rotation_scans": [{ + "rotation_axis": "omega", + "rotation_direction": "Negative", + "scan_width_deg": 180.0, + "omega_start_deg": 0.0, + "phi_start_deg": 0.47, + "chi_start_deg": 23.85, + "x_start_um": 1.0, + "y_start_um": 2.0, + "z_start_um": 3.0 + },{ + "rotation_axis": "omega", + "rotation_direction": "Negative", + "scan_width_deg": 90.0, + "omega_start_deg": 180.0, + "phi_start_deg": 0.47, + "chi_start_deg": 4.7, + "x_start_um": 3.0, + "y_start_um": 2.0, + "z_start_um": 1.0 + },{ + "rotation_axis": "omega", + "rotation_direction": "Positive", + "scan_width_deg": 360.0, + "omega_start_deg": 270.0, + "phi_start_deg": 0.47, + "chi_start_deg": 45, + "x_start_um": 6.0, + "y_start_um": 7.0, + 
"z_start_um": 8.0 + }] +} diff --git a/tests/test_data/parameter_json_files/good_test_parameters.json b/tests/test_data/parameter_json_files/good_test_parameters.json new file mode 100644 index 000000000..d3e5f2d32 --- /dev/null +++ b/tests/test_data/parameter_json_files/good_test_parameters.json @@ -0,0 +1,29 @@ +{ + "parameter_model_version": "5.0.0", + "demand_energy_ev": 100, + "comment": "test", + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "detector_distance_mm": 100.0, + "visit": "cm31105-4", + "exposure_time_s": 0.1, + "insertion_prefix": "SR03S", + "omega_start_deg": 0.0, + "file_name": "file_name", + "sample_id": 123456, + "run_number": 0, + "use_roi_mode": false, + "transmission_frac": 1.0, + "zocalo_environment": "dev_artemis", + "x_steps": 40, + "y_steps": 20, + "z_steps": 10, + "x_step_size_um": 0.1, + "y_step_size_um": 0.1, + "z_step_size_um": 0.1, + "x_start_um": 0.0, + "y_start_um": 0.0, + "y2_start_um": 0.0, + "z_start_um": 0.0, + "z2_start_um": 0.0, + "storage_directory": "/tmp/dls/i03/data/2024/cm31105-4/xraycentring/123456/" +} diff --git a/tests/test_data/parameter_json_files/good_test_pin_centre_then_xray_centre_parameters.json b/tests/test_data/parameter_json_files/good_test_pin_centre_then_xray_centre_parameters.json new file mode 100644 index 000000000..d013ee482 --- /dev/null +++ b/tests/test_data/parameter_json_files/good_test_pin_centre_then_xray_centre_parameters.json @@ -0,0 +1,20 @@ +{ + "parameter_model_version": "5.0.0", + "beamline": "BL03S", + "insertion_prefix": "SR03S", + "zocalo_environment": "devrmq", + "storage_directory": "/tmp", + "file_name": "file_name", + "run_number": 0, + "sample_id": 123456, + "use_roi_mode": false, + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "exposure_time_s": 0.1, + "detector_distance_mm": 100.0, + "omega_start_deg": 0.0, + "tip_offset_um": 108.9, + "grid_width_um": 290.6, + "oav_centring_file": "tests/test_data/test_OAVCentring.json", + "transmission_frac": 1.0, + "visit": "cm31105-4" +} diff --git a/tests/test_data/parameter_json_files/good_test_robot_load_params.json b/tests/test_data/parameter_json_files/good_test_robot_load_params.json new file mode 100644 index 000000000..c5e63125a --- /dev/null +++ b/tests/test_data/parameter_json_files/good_test_robot_load_params.json @@ -0,0 +1,21 @@ +{ + "parameter_model_version": "5.0.0", + "zocalo_environment": "dev_artemis", + "beamline": "BL03S", + "insertion_prefix": "SR03S", + "storage_directory": "/tmp/", + "visit": "cm31105-4", + "file_name": "file_name", + "run_number": 0, + "use_roi_mode": false, + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "omega_start_deg": 0, + "transmission_frac": 1.0, + "exposure_time_s": 0.004, + "detector_distance_mm": 255, + "demand_energy_ev": 11100, + "sample_id": 12345, + "sample_puck": 40, + "sample_pin": 3, + "comment": "Descriptive comment." 
+} diff --git a/tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json b/tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json new file mode 100644 index 000000000..24ac8afc2 --- /dev/null +++ b/tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json @@ -0,0 +1,36 @@ +{ + "parameter_model_version": "5.0.0", + "comment": "test", + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "storage_directory": "/tmp/dls/i03/data/2024/cm31105-4/auto/123456/", + "detector_distance_mm": 100.0, + "demand_energy_ev": 100, + "exposure_time_s": 0.1, + "insertion_prefix": "SR03S", + "omega_start_deg": 0.0, + "file_name": "file_name", + "scan_width_deg": 180.0, + "rotation_axis": "omega", + "rotation_direction": "Negative", + "rotation_increment_deg": 0.1, + "run_number": 0, + "sample_id": 123456, + "shutter_opening_time_s": 0.6, + "visit": "cm31105-4", + "zocalo_environment": "dev_artemis", + "transmission_frac": 0.1, + "phi_start_deg": 0.47, + "chi_start_deg": 23.85, + "x_start_um": 1.0, + "y_start_um": 2.0, + "z_start_um": 3.0, + "selected_aperture": "SMALL_APERTURE", + "snapshot_omegas_deg": [0, 90, 180, 270], + "ispyb_extras": { + "xtal_snapshots_omega_start": [ + "test_1_y", + "test_2_y", + "test_3_y" + ] + } +} diff --git a/tests/test_data/parameter_json_files/good_test_rotation_scan_parameters_nomove.json b/tests/test_data/parameter_json_files/good_test_rotation_scan_parameters_nomove.json new file mode 100644 index 000000000..446c53a68 --- /dev/null +++ b/tests/test_data/parameter_json_files/good_test_rotation_scan_parameters_nomove.json @@ -0,0 +1,30 @@ +{ + "parameter_model_version": "5.0.0", + "comment": "test", + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "storage_directory": "/tmp/dls/i03/data/2024/cm31105-4/auto/123456/", + "detector_distance_mm": 100.0, + "demand_energy_ev": 100, + "exposure_time_s": 0.1, + "insertion_prefix": "SR03S", + "omega_start_deg": 0.0, + "file_name": "file_name", + "scan_width_deg": 180.0, + "rotation_axis": "omega", + "rotation_direction": "Negative", + "rotation_increment_deg": 0.1, + "run_number": 0, + "sample_id": 123456, + "shutter_opening_time_s": 0.6, + "visit": "cm31105-4", + "zocalo_environment": "dev_artemis", + "transmission_frac": 0.1, + "selected_aperture": "LARGE_APERTURE", + "ispyb_extras": { + "xtal_snapshots_omega_start": [ + "test_1_y", + "test_2_y", + "test_3_y" + ] + } +} diff --git a/tests/test_data/parameter_json_files/ispyb_gridscan_system_test_parameters.json b/tests/test_data/parameter_json_files/ispyb_gridscan_system_test_parameters.json new file mode 100644 index 000000000..9b8257726 --- /dev/null +++ b/tests/test_data/parameter_json_files/ispyb_gridscan_system_test_parameters.json @@ -0,0 +1,20 @@ +{ + "parameter_model_version": "5.0.0", + "beamline": "BL03S", + "insertion_prefix": "SR03S", + "zocalo_environment": "dev_artemis", + "storage_directory": "/tmp", + "file_name": "file_name", + "run_number": 0, + "sample_id": 123456, + "use_roi_mode": false, + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "exposure_time_s": 0.12, + "detector_distance_mm": 100.0, + "omega_start_deg": 0.0, + "grid_width_um": 400, + "oav_centring_file": "tests/test_data/test_OAVCentring.json", + "transmission_frac": 0.49118, + "visit": "cm31105-4", + "demand_energy_ev": 12700 +} diff --git a/tests/test_data/parameter_json_files/test_gridscan_param_defaults.json 
b/tests/test_data/parameter_json_files/test_gridscan_param_defaults.json new file mode 100644 index 000000000..6dc36d13e --- /dev/null +++ b/tests/test_data/parameter_json_files/test_gridscan_param_defaults.json @@ -0,0 +1,30 @@ +{ + "parameter_model_version": "5.0.0", + "zocalo_environment": "dev_artemis", + "beamline": "BL03S", + "insertion_prefix": "SR03S", + "demand_energy_ev": 100, + "storage_directory": "/tmp/", + "visit": "cm31105-4", + "file_name": "file_name", + "sample_id": 364758, + "run_number": 0, + "comment": "Descriptive comment.", + "use_roi_mode": false, + "det_dist_to_beam_converter_path": "tests/test_data/test_lookup_table.txt", + "transmission_frac": 1.0, + "x_steps": 40, + "y_steps": 20, + "z_steps": 10, + "x_step_size_um": 0.1, + "y_step_size_um": 0.1, + "z_step_size_um": 0.1, + "x_start_um": 0.0, + "y_start_um": 0.0, + "y2_start_um": 0.0, + "z_start_um": 0.0, + "z2_start_um": 0.0, + "detector_distance_mm": 100.0, + "omega_start_deg": 0.0, + "exposure_time_s": 0.1 +} diff --git a/tests/test_data/parameter_json_files/test_oav_snapshot_params.json b/tests/test_data/parameter_json_files/test_oav_snapshot_params.json new file mode 100644 index 000000000..df9a7d19b --- /dev/null +++ b/tests/test_data/parameter_json_files/test_oav_snapshot_params.json @@ -0,0 +1,5 @@ +{ + "parameter_model_version": "5.0.0", + "snapshot_directory": "/tmp/my_snapshots", + "snapshot_omegas_deg": [0, 90, 180, 270] +} diff --git a/tests/test_data/scratch/README b/tests/test_data/scratch/README new file mode 100644 index 000000000..15450c9af --- /dev/null +++ b/tests/test_data/scratch/README @@ -0,0 +1 @@ +This directory exists for tests to temporarily write files. Anything in here except this readme can be safely deleted. diff --git a/tests/test_data/test_OAVCentring.json b/tests/test_data/test_OAVCentring.json new file mode 100644 index 000000000..96cb1b36a --- /dev/null +++ b/tests/test_data/test_OAVCentring.json @@ -0,0 +1,69 @@ +{ + "exposure": 0.075, + "acqPeriod": 0.05, + "gain": 1.0, + "minheight": 70, + "oav": "OAV", + "mxsc_input": "CAM", + "min_callback_time": 0.080, + "close_ksize": 11, + "direction": 0, + "pinTipCentring": { + "zoom": 1.0, + "preprocess": 8, + "preProcessKSize": 21, + "CannyEdgeUpperThreshold": 20.0, + "CannyEdgeLowerThreshold": 5.0, + "brightness": 20, + "max_tip_distance": 300, + "mxsc_input": "proc", + "minheight": 10, + "min_callback_time": 0.15, + "filename": "/dls_sw/prod/R3.14.12.7/support/adPython/2-1-11/adPythonApp/scripts/adPythonMxSampleDetect.py" + }, + "loopCentring": { + "zoom": 5.0, + "preprocess": 8, + "preProcessKSize": 21, + "CannyEdgeUpperThreshold": 20.0, + "CannyEdgeLowerThreshold": 5.0, + "brightness": 20, + "filename": "/dls_sw/prod/R3.14.12.7/support/adPython/2-1-11/adPythonApp/scripts/adPythonMxSampleDetect.py", + "max_tip_distance": 300 + }, + "xrayCentring": { + "zoom": 7.5, + "preprocess": 8, + "preProcessKSize": 31, + "CannyEdgeUpperThreshold": 30.0, + "CannyEdgeLowerThreshold": 5.0, + "close_ksize": 3, + "filename": "/dls_sw/prod/R3.14.12.7/support/adPython/2-1-11/adPythonApp/scripts/adPythonMxSampleDetect.py", + "brightness": 80 + }, + "rotationAxisAlign": { + "zoom": 10.0, + "preprocess": 8, + "preProcessKSize": 21, + "CannyEdgeUpperThreshold": 20.0, + "CannyEdgeLowerThreshold": 5.0, + "filename": "/dls_sw/prod/R3.14.12.7/support/adPython/2-1-11/adPythonApp/scripts/adPythonMxSampleDetect.py", + "brightness": 100 + }, + "SmargonOffsets1": { + "zoom": 1.0, + "preprocess": 8, + "preProcessKSize": 21, + "CannyEdgeUpperThreshold": 50.0, + 
"CannyEdgeLowerThreshold": 5.0, + "brightness": 80 + }, + "SmargonOffsets2": { + "zoom": 5.0, + "preprocess": 8, + "preProcessKSize": 11, + "CannyEdgeUpperThreshold": 50.0, + "CannyEdgeLowerThreshold": 5.0, + "brightness": 90 + } +} diff --git a/tests/test_data/test_beamline_dcm_pitch_converter.txt b/tests/test_data/test_beamline_dcm_pitch_converter.txt new file mode 100644 index 000000000..b4666f489 --- /dev/null +++ b/tests/test_data/test_beamline_dcm_pitch_converter.txt @@ -0,0 +1,24 @@ +# Bragg pitch +# Degree values for pitch are interpreted as mrad +# The values cannot change direction. +# last update 2023/06/26 NP +Units Deg mrad +19.24347 -0.80190 +16.40945 -0.79115 +14.31123 -0.78236 +12.69280 -0.77737 +11.40541 -0.77541 +10.35649 -0.77356 +9.48509 -0.77077 +8.95818 -0.76853 +8.74947 -0.76843 +8.12012 -0.76843 +7.57547 -0.76640 +7.09945 -0.76389 +6.67985 -0.76222 +6.30717 -0.76044 +5.97397 -0.76044 +5.67425 -0.75981 +5.40325 -0.75949 +5.15693 -0.75949 +4.93212 -0.7582 diff --git a/tests/test_data/test_beamline_dcm_roll_converter.txt b/tests/test_data/test_beamline_dcm_roll_converter.txt new file mode 100644 index 000000000..0368da30b --- /dev/null +++ b/tests/test_data/test_beamline_dcm_roll_converter.txt @@ -0,0 +1,10 @@ +#Bragg angle against roll( absolute number) +#reloadLookupTables() +# last update 2023/01/19 NP +Units Deg mrad +# 16.4095 -0.2885 +# 6.3075 -0.2885 +2.0 1.0 +4.0 2.0 +5.0 4.0 +5.5 8.0 diff --git a/tests/test_data/test_beamline_parameters.txt b/tests/test_data/test_beamline_parameters.txt new file mode 100644 index 000000000..5247b15d3 --- /dev/null +++ b/tests/test_data/test_beamline_parameters.txt @@ -0,0 +1,298 @@ +# +# +BeamLine BL03I + +## BLSE=FB switches between scan alignment and feedback alignment +## by creating bl energy scannable with beamLineSpecificEnergy_FB +## after changing you must restart servers or >>> reset_namespace +BLSE=FB + +## BPFB (Beam Position FeedBack) +## HALF (default) only off during data collection +## FULL only off for XBPM2 during attenuation optimisation, fluo when trans < 2% and wedged MAD +## UNAVAILABLE (not default) prevents xbpm_feedback.py trying to access EPICS IOC that may not be running +BPFB=FULL +## Note: only beamline scientists control whether feedback is enabled +## via the XBPM feedback EDM screen in Synoptic + +# DCM parameters +DCM_Perp_Offset_FIXED = 25.6 +# +# beamstop +# +parked_x = 4.49 +parked_y = -50.0 +parked_y_plate = -50.5 +parked_z = -49.5 +parked_z_robot = 30.0 + +in_beam_z_MIN_START_POS = 60.0 + +in_beam_x_HIGHRES = 1.52 +in_beam_y_HIGHRES = 44.78 +in_beam_z_HIGHRES = 30.0 + +in_beam_x_STANDARD = 1.52 +in_beam_y_STANDARD = 44.78 +in_beam_z_STANDARD = 30.0 + +in_beam_x_LOWRES = 1.52 +in_beam_y_LOWRES = 44.78 +in_beam_z_LOWRES = 48 + +checkCryojet = No +#If is to be moved in by the script. If not Yes then control is handed to the robot on activate script +#To force the cryojet run hutch_utilities.hutch.forceCryoOut() +manualCryojet = Yes + +######################################################### +############# All these need checking! 
############ +######################################################### + +#Aperture - Scatterguard positions +# 100 micron ap +miniap_x_LARGE_APERTURE = 2.389 +miniap_y_LARGE_APERTURE = 40.986 +miniap_z_LARGE_APERTURE = 15.8 + +sg_x_LARGE_APERTURE = 5.25 +sg_y_LARGE_APERTURE = 4.43 + +# 50 micron ap +miniap_x_MEDIUM_APERTURE = 2.384 +miniap_y_MEDIUM_APERTURE = 44.967 +miniap_z_MEDIUM_APERTURE = 15.8 +sg_x_MEDIUM_APERTURE = 5.285 +sg_y_MEDIUM_APERTURE = 0.46 + +# 20 micron ap +miniap_x_SMALL_APERTURE = 2.430 +miniap_y_SMALL_APERTURE = 48.974 +miniap_z_SMALL_APERTURE = 15.8 +sg_x_SMALL_APERTURE = 5.3375 +sg_y_SMALL_APERTURE = -3.55 + +# Robot load +miniap_x_ROBOT_LOAD = 2.386 +miniap_y_ROBOT_LOAD = 31.40 +miniap_z_ROBOT_LOAD = 15.8 +sg_x_ROBOT_LOAD = 5.25 +sg_y_ROBOT_LOAD = 4.43 + +# manual mount +miniap_x_MANUAL_LOAD = -4.91 +miniap_y_MANUAL_LOAD = -49.0 +miniap_z_MANUAL_LOAD = -10.0 + +sg_x_MANUAL_LOAD = -4.7 +sg_y_MANUAL_LOAD = 1.8 + +miniap_x_SCIN_MOVE = -4.91 +# prion setting +#miniap_x_SCIN_MOVE = 0.0 +sg_x_SCIN_MOVE = -4.75 + +scin_y_SCIN_IN = 100.855 +scin_y_SCIN_OUT = -0.02 +scin_z_SCIN_IN = 101.5115 + + +scin_z_SCIN_OUT = 0.1 + +#distance to move gonx,y,z when scintillator is put in with standard pins +# For old gonio: +gon_x_SCIN_OUT_DISTANCE = 1.0 +# For SmarGon: +gon_x_SCIN_OUT_DISTANCE_smargon = 1 + +gon_y_SCIN_OUT_DISTANCE = 2.0 +gon_z_SCIN_OUT_DISTANCE = -0.5 + +#CASS motor position tolerances (mm) +miniap_x_tolerance = 0.004 +miniap_y_tolerance = 0.1 +miniap_z_tolerance = 0.1 +sg_x_tolerance = 0.1 +sg_y_tolerance = 0.1 +scin_y_tolerance = 0.1 +scin_z_tolerance = 0.12 +gon_x_tolerance = 0.01 +gon_y_tolerance = 0.1 +gon_z_tolerance = 0.001 +bs_x_tolerance = 0.02 +bs_y_tolerance = 0.005 +bs_z_tolerance = 0.3 +crl_x_tolerance = 0.01 +crl_y_tolerance = 0.01 +crl_pitch_tolerance = 0.01 +crl_yaw_tolerance = 0.01 +sg_y_up_movement_tolerance = 1.0 + +sg_x_timeout = 10 +sg_y_timeout = 10 +miniap_x_timeout = 60 +miniap_y_timeout = 10 +gon_x_timeout = 60 +gon_y_timeout = 30 +gon_z_timeout = 30 +crl_x_timeout = 10 +crl_y_timeout = 10 +crl_pitch_timeout = 10 +crl_yaw_timeout = 10 + +col_inbeam_tolerance = 1.0 + +# robot load collimation table reference positions (mm) +col_parked_tolerance = 1.0 +col_parked_upstream_x = 0.0 +col_parked_downstream_x = 0.0 +col_parked_upstream_y = 0.0 +col_parked_inboard_y = 0.0 +col_parked_outboard_y = 0.0 + +## CRL positions for low and high energy lens sets. Should deliver beam to same position on scintillator. +## Normally should only adjust the low energy set to match the position of the high energy that you've +## already checked on the scintillator screen. 
+ +crl_x_LOWE = -11.78 +crl_y_LOWE = -4.3 +crl_pitch_LOWE = -4.75 +crl_yaw_LOWE = -1.0 + +crl_x_HIGHE = 2.22 +crl_y_HIGHE = -4.30 +crl_pitch_HIGHE = -2.75 +crl_yaw_HIGHE = 0 + + +######################################################### +########## End of new parameters ########### +######################################################### + + +#Beam visualisation parameters +MinBackStopZ = 30.0 +BackStopYsafe = 20.0 +BackStopXyag = -4.8 +BackStopYyag = 17.20 +BackStopZyag = 19.1 +SampleYnormal = 2.65 +SampleYshift = 2.0 +parked_fluo_x = -18.0 +in_beam_fluo_x = 12.0 +move_fluo = Yes +safe_det_z_default = 900 +safe_det_z_sampleChanger = 337 +store_data_collections_in_ispyb = Yes +TakePNGsOfSample = Yes + +#robot requires these values +gonio_parked_x = 0.0 +gonio_parked_y = 0.0 +gonio_parked_z = 0.0 +gonio_parked_omega = 0 +gonio_parked_chi = 0 +gonio_parked_phi = 0 + +# The following used by setupBeamLine script +setupBeamLine_energyStart = 7000.0 +setupBeamLine_energyEnd = 17000.0 +setupBeamLine_energyStep = 500 +setupBeamLine_rollStart = -4 +setupBeamLine_rollEnd = 4 +setupBeamLine_rollSteps = 21 +setupBeamLine_pitchStart = -3.7 +setupBeamLine_pitchEnd = -3.5 +setupBeamLine_pitchSteps = 200 +#values below in microns +beamXCentre = 0 +beamYCentre = 0 +beamXYSettleTime = 6.0 +beamXYTolerance = 5.0 +DataCollection_TurboMode = Yes +#time in seconds. If not set then the default is 0.1 + +#The following are used by beamLineenergy script +beamLineEnergy_rollBeamX 50 +beamLineEnergy_rollBeamY 200 +beamLineEnergy__rollWidth = .2 +beamLineEnergy__rollStep = .02 +beamLineEnergy__pitchWidth = .02 +beamLineEnergy__pitchStep = .002 +beamLineEnergy__fpitchWidth = .02 +beamLineEnergy__fpitchStep = .001 +beamLineEnergy__adjustSlits = No +#dataCollectionMinSampleCurrent = 0.245 +dataCollectionMinSampleCurrent = 0.000 +dataCollectionSampleCurrent qbpm3 + +#Mark is using the following in some test scripts +MinIPin = 1.0 +YAGPin = 1 +RotationAxisPin = 2 +PtPin = 3 +PowderPin = 4 + +iPinInDetZ = 340.0 + +DataCollectionDetX = -7.8504 +DataCollectionDetYaw = 6.499 +DataCollectionDetY = 48.0 + +# StandardEnergy on i03 is 12700eV +StandardEnergy = 12700 + +keyence_max_attempts = 1 +# Move gonio 100 microns, see difference in keyence values +# Then do 100/difference, put that number below +# Sign may change between Smargon and MiniKappa +keyence_slopeYToX = 2.5 +keyence_slopeYToY = -2.5 +keyence_slopeXToZ = 3.23 + +YAGSamX = 1022 +YAGSamY = -98.0 +YAGSamZ = -147 +YAGOmega = 0.0 + +#ipin value must be < ipin_threshold above background for data collection +ipin_threshold = 0.1 + +# energy thresholds for mirror stripes +# - first threshold is between bare/Rh stripes (e.g. 7000) +# - second threshold is between Rh/Pt stripes (e.g. 
18000) +mirror_threshold_bare_rh = 6900 +mirror_threshold_rh_pt = 30000 + +# flux conversion factors +flux_factor_no_aperture = 1 +flux_factor_LARGE_APERTURE = 0.738 +flux_factor_MEDIUM_APERTURE = 0.36 +flux_factor_SMALL_APERTURE = 0.084 +flux_factor_no_aperture_plate = 1 +flux_factor_LARGE_APERTURE_plate = 0.738 +flux_factor_MEDIUM_APERTURE_plate = 0.36 +flux_factor_SMALL_APERTURE_plate = 0.084 + +# assuming gain 10^3 +pin_diode_factor = 2.66E19 + +# Fluorescence/Vortex detector settings +attenuation_optimisation_type = deadtime # deadtime or total_counts + +#Deadtime settings +fluorescence_analyser_deadtimeThreshold=0.002 # used by edge scans +fluorescence_spectrum_deadtimeThreshold=0.0005 # used by spectrum + +#Other settings +fluorescence_attenuation_low_roi = 100 +fluorescence_attenuation_high_roi = 2048 +attenuation_optimisation_optimisation_cycles = 10 +attenuation_optimisation_start_transmission = 0.1 # per cent +fluorescence_mca_sca_offset = 400 + +#Total count settings +attenuation_optimisation_multiplier = 2 +attenuation_optimisation_target_count = 2000 +attenuation_optimisation_upper_limit = 50000 +attenuation_optimisation_lower_limit = 20000 diff --git a/tests/test_data/test_beamline_vfm_lat_converter.txt b/tests/test_data/test_beamline_vfm_lat_converter.txt new file mode 100755 index 000000000..eaa143b5f --- /dev/null +++ b/tests/test_data/test_beamline_vfm_lat_converter.txt @@ -0,0 +1,5 @@ +# DCM Bragg angle to VFM x translation lookup +# The values cannot change direction. +Units Deg Deg +5.0 10.0 +25.0 10.0 diff --git a/tests/test_data/test_config.cfg b/tests/test_data/test_config.cfg new file mode 100644 index 000000000..f7849e93c --- /dev/null +++ b/tests/test_data/test_config.cfg @@ -0,0 +1,27 @@ +[ispyb_mysql_sp] +user = ispyb_api +pw = notapassword +host = computer-somewhere +port = 3306 +db = ispyb +conn_inactivity = 360 + +[ispyb_mariadb_sp] +user = ispyb_api +pw = notapassword +host = computer-somewhere +port = 3306 +db = ispyb +reconn_attempts = 6 +reconn_delay = 1 + +[ispyb_sqlalchemy] +username = ispyb_zocalo +password = notapassword +host = computer-somewhere +port = 3306 +database = ispyb + +[expeye] +url = http://blah +token = notatoken diff --git a/tests/test_data/test_daq_configuration/domain/beamlineParameters b/tests/test_data/test_daq_configuration/domain/beamlineParameters new file mode 100644 index 000000000..0a783cd2f --- /dev/null +++ b/tests/test_data/test_daq_configuration/domain/beamlineParameters @@ -0,0 +1,139 @@ +# +# +BeamLine BL03S + +## Test data for device instantiation +BLSE=FB + +## BPFB (Beam Position FeedBack) +## HALF (default) only off during data collection +## FULL only off for XBPM2 during attenuation optimisation, fluo when trans < 2% and wedged MAD +## UNAVAILABLE (not default) prevents xbpm_feedback.py trying to access EPICS IOC that may not be running +BPFB=FULL +## Note: only beamline scientists control whether feedback is enabled +## via the XBPM feedback EDM screen in Synoptic + +# DCM parameters +DCM_Perp_Offset_FIXED = 25.6 +# +# beamstop +# +parked_x = 4.49 +parked_y = -50.0 +parked_y_plate = -50.5 +parked_z = -49.5 +parked_z_robot = 30.0 + +in_beam_z_MIN_START_POS = 60.0 + + +#Aperture - Scatterguard positions +# 100 micron ap +miniap_x_LARGE_APERTURE = 2.389 +miniap_y_LARGE_APERTURE = 40.986 +miniap_z_LARGE_APERTURE = 15.8 + +sg_x_LARGE_APERTURE = 5.25 +sg_y_LARGE_APERTURE = 4.43 + +# 50 micron ap +miniap_x_MEDIUM_APERTURE = 2.384 +miniap_y_MEDIUM_APERTURE = 44.967 +miniap_z_MEDIUM_APERTURE = 15.8 +sg_x_MEDIUM_APERTURE 
= 5.285 +sg_y_MEDIUM_APERTURE = 0.46 + +# 20 micron ap +miniap_x_SMALL_APERTURE = 2.430 +miniap_y_SMALL_APERTURE = 48.974 +miniap_z_SMALL_APERTURE = 15.8 +sg_x_SMALL_APERTURE = 5.3375 +sg_y_SMALL_APERTURE = -3.55 + +# Robot load +miniap_x_ROBOT_LOAD = 2.386 +miniap_y_ROBOT_LOAD = 31.40 +miniap_z_ROBOT_LOAD = 15.8 +sg_x_ROBOT_LOAD = 5.25 +sg_y_ROBOT_LOAD = 4.43 + +# manual mount +miniap_x_MANUAL_LOAD = -4.91 +miniap_y_MANUAL_LOAD = -49.0 +miniap_z_MANUAL_LOAD = -10.0 + +sg_x_MANUAL_LOAD = -4.7 +sg_y_MANUAL_LOAD = 1.8 + +miniap_x_SCIN_MOVE = -4.91 +# prion setting +#miniap_x_SCIN_MOVE = 0.0 +sg_x_SCIN_MOVE = -4.75 + +scin_y_SCIN_IN = 100.855 +scin_y_SCIN_OUT = -0.02 +scin_z_SCIN_IN = 101.5115 + + +scin_z_SCIN_OUT = 0.1 + +#distance to move gonx,y,z when scintillator is put in with standard pins +# For old gonio: +gon_x_SCIN_OUT_DISTANCE = 1.0 +# For SmarGon: +gon_x_SCIN_OUT_DISTANCE_smargon = 1 + +gon_y_SCIN_OUT_DISTANCE = 2.0 +gon_z_SCIN_OUT_DISTANCE = -0.5 + +# StandardEnergy on i03 is 12700eV +StandardEnergy = 12700 + +keyence_max_attempts = 1 +# Move gonio 100 microns, see difference in keyence values +# Then do 100/difference, put that number below +# Sign may change between Smargon and MiniKappa +keyence_slopeYToX = 2.5 +keyence_slopeYToY = -2.5 +keyence_slopeXToZ = 3.23 + +YAGSamX = 1022 +YAGSamY = -98.0 +YAGSamZ = -147 +YAGOmega = 0.0 + +#ipin value must be < ipin_threshold above background for data collection +ipin_threshold = 0.1 + +# energy thresholds for mirror stripes +# - first threshold is between bare/Rh stripes (e.g. 7000) +# - second threshold is between Rh/Pt stripes (e.g. 18000) +mirror_threshold_bare_rh = 6900 +mirror_threshold_rh_pt = 30000 + +# flux conversion factors +flux_factor_no_aperture = 1 +flux_factor_LARGE_APERTURE = 0.738 +flux_factor_MEDIUM_APERTURE = 0.36 +flux_factor_SMALL_APERTURE = 0.084 +flux_factor_no_aperture_plate = 1 +flux_factor_LARGE_APERTURE_plate = 0.738 +flux_factor_MEDIUM_APERTURE_plate = 0.36 +flux_factor_SMALL_APERTURE_plate = 0.084 + +#Deadtime settings +fluorescence_analyser_deadtimeThreshold=0.002 # used by edge scans +fluorescence_spectrum_deadtimeThreshold=0.0005 # used by spectrum + +#Other settings +fluorescence_attenuation_low_roi = 100 +fluorescence_attenuation_high_roi = 2048 +attenuation_optimisation_optimisation_cycles = 10 +attenuation_optimisation_start_transmission = 0.1 # per cent +fluorescence_mca_sca_offset = 400 + +#Total count settings +attenuation_optimisation_multiplier = 2 +attenuation_optimisation_target_count = 2000 +attenuation_optimisation_upper_limit = 50000 +attenuation_optimisation_lower_limit = 20000 diff --git a/tests/test_data/test_daq_configuration/lookup/BeamLineEnergy_DCM_Pitch_converter.txt b/tests/test_data/test_daq_configuration/lookup/BeamLineEnergy_DCM_Pitch_converter.txt new file mode 100644 index 000000000..449e920f7 --- /dev/null +++ b/tests/test_data/test_daq_configuration/lookup/BeamLineEnergy_DCM_Pitch_converter.txt @@ -0,0 +1,25 @@ +# Bragg pitch +# Degree values for pitch are interpreted as mrad +# The values cannot change direction. 
+# last update 2023/06/26 NP +Units Deg mrad +Units Deg Deg +19.24347 -0.79775 +16.40949 -0.78679 +14.31123 -0.77838 +12.69287 -0.77276 +11.40555 -0.77276 +10.35662 -0.77031 +9.48522 -0.76693 +8.95826 -0.76387 +8.74953 -0.76387 +8.12020 -0.76387 +7.57556 -0.76354 +7.09950 -0.76166 +6.67997 -0.76044 +6.30732 -0.75953 +5.97411 -0.75845 +5.67434 -0.75796 +5.40329 -0.75789 +5.15700 -0.75551 +4.93218 -0.75513 diff --git a/tests/test_data/test_daq_configuration/lookup/BeamLineEnergy_DCM_Roll_converter.txt b/tests/test_data/test_daq_configuration/lookup/BeamLineEnergy_DCM_Roll_converter.txt new file mode 100644 index 000000000..9b5b52dcb --- /dev/null +++ b/tests/test_data/test_daq_configuration/lookup/BeamLineEnergy_DCM_Roll_converter.txt @@ -0,0 +1,6 @@ +#Bragg angle against roll( absolute number) +#reloadLookupTables() +# last update 2023/01/19 NP +Units Deg mrad +26.4095 -0.2799 +6.3075 -0.2799 diff --git a/tests/test_data/test_det_dist_converter.txt b/tests/test_data/test_det_dist_converter.txt new file mode 100644 index 000000000..084853e43 --- /dev/null +++ b/tests/test_data/test_det_dist_converter.txt @@ -0,0 +1,7 @@ +#Table giving position of beam X and Y as a function of detector distance +#Units mm mm mm +# Eiger values +# distance beamY beamX (values from mosflm) +Units mm mm mm +200 153.61 162.45 +500 153.57 159.96 diff --git a/tests/test_data/test_display.configuration b/tests/test_data/test_display.configuration new file mode 100755 index 000000000..dfb01954a --- /dev/null +++ b/tests/test_data/test_display.configuration @@ -0,0 +1,42 @@ +zoomLevel = 1.0 +crosshairX = 477 +crosshairY = 359 +topLeftX = 383 +topLeftY = 253 +bottomRightX = 410 +bottomRightY = 278 +zoomLevel = 2.5 +crosshairX = 493 +crosshairY = 355 +topLeftX = 340 +topLeftY = 283 +bottomRightX = 388 +bottomRightY = 322 +zoomLevel = 5.0 +crosshairX = 517 +crosshairY = 350 +topLeftX = 268 +topLeftY = 326 +bottomRightX = 354 +bottomRightY = 387 +zoomLevel = 7.5 +crosshairX = 549 +crosshairY = 347 +topLeftX = 248 +topLeftY = 394 +bottomRightX = 377 +bottomRightY = 507 +zoomLevel = 10.0 +crosshairX = 613 +crosshairY = 344 +topLeftX = 2 +topLeftY = 489 +bottomRightX = 206 +bottomRightY = 630 +zoomLevel = 15.0 +crosshairX = 693 +crosshairY = 339 +topLeftX = 1 +topLeftY = 601 +bottomRightX = 65 +bottomRightY = 767 diff --git a/tests/test_data/test_jCameraManZoomLevels.xml b/tests/test_data/test_jCameraManZoomLevels.xml new file mode 100644 index 000000000..d751fd697 --- /dev/null +++ b/tests/test_data/test_jCameraManZoomLevels.xml @@ -0,0 +1,42 @@ + + + + + 1.0 + 0 + 2.87 + 2.87 + + + 2.5 + 10 + 2.31 + 2.31 + + + 5.0 + 25 + 1.58 + 1.58 + + + 7.5 + 50 + 0.806 + 0.806 + + + 10.0 + 75 + 0.438 + 0.438 + + + 15.0 + 90 + 0.302 + 0.302 + + +1.0 + diff --git a/tests/test_data/test_lookup_table.txt b/tests/test_data/test_lookup_table.txt new file mode 100644 index 000000000..16fa297a0 --- /dev/null +++ b/tests/test_data/test_lookup_table.txt @@ -0,0 +1,5 @@ +# Beam converter lookup table for testing + +Units det_dist beam_x beam_y +100.0 150.0 160.0 +200.0 151.0 165.0 diff --git a/tests/test_data/test_mirror_focus.json b/tests/test_data/test_mirror_focus.json new file mode 100644 index 000000000..b64009549 --- /dev/null +++ b/tests/test_data/test_mirror_focus.json @@ -0,0 +1,75 @@ +{ + + "sample": { + + "bare": { + "hfm": [1, 107, 15, 139, 41, 165, 11, 6, 166, -65, 0, -38, 179, 128], + "vfm": [140, 100, 70, 30, 30, -65, 24, 15] + }, + + "rh": { + "hfm": [11, 117, 25, 149, 51, 145, -9, -14, 146, -10, 55, 17, 144, 93], + "vfm": [124, 
114, 34, 49, 19, -116, 4, -46] + }, + + "pt": { + "hfm": [11, 143, -82, 81, -58, 141, -47, -47, 140, -75, -30, -107, 145, 86], + "vfm": [154, 180, 146, 116, 98, 7, 92, 118] + } + }, + + "detector": { + + "bare": { + "hfm": [11, 143, -82, 81, -58, 141, -47, -47, 140, -75, -30, -107, 145, 86], + "vfm": [120, 138, 108, 101, 77, -1, 81, 149] + }, + + "rh": { + "hfm": [11, 143, -82, 81, -58, 141, -47, -47, 140, -75, -30, -107, 145, 86], + "vfm": [154, 154, 137, 86, 93, -13, 110, 141] + }, + + "pt": { + "hfm": [11, 143, -82, 81, -58, 141, -47, -47, 140, -75, -30, -107, 145, 86], + "vfm": [154, 154, 137, 86, 93, -13, 110, 141] + } + }, + + "defocussed": { + + "bare": { + "hfm": [11, 143, -82, 81, -58, 141, -47, -47, 140, -75, -30, -107, 145, 86], + "vfm": [145, 163, 133, 126, 102, 24, 106, 174] + }, + + "rh": { + "hfm": [11, 143, -82, 81, -58, 141, -47, -47, 140, -75, -30, -107, 145, 86], + "vfm": [115, 133, 103, 96, 72, -6, 76, 144] + }, + + "pt": { + "hfm": [11, 143, -82, 81, -58, 141, -47, -47, 140, -75, -30, -107, 145, 86], + "vfm": [145, 163, 133, 126, 102, 24, 106, 174] + } + }, + + "minibeam": { + + "bare": { + "hfm": [97, 99, 62, 115, 152, 125, 0, 0, 125, 140, 172, 96, 190, 190], + "vfm": [132, 143, 157, 175, 179, 54, 182, 166] + }, + + "rh": { + "hfm": [130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130], + "vfm": [162, 244, 225, 135, 171, 39, 106, -38] + }, + + "pt": { + "hfm": [100, 74, 37, 64, 72, 142, 62, 50, 50, 0, 47, -35, -46, 72], + "vfm": [64, 175, 130, 123, 101, 56, 29, 34] + } + } + +} diff --git a/tests/unit_tests/__init__.py b/tests/unit_tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/beamlines/__init__.py b/tests/unit_tests/beamlines/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/i04/callbacks/test_murko_callback.py b/tests/unit_tests/beamlines/i04/callbacks/test_murko_callback.py similarity index 97% rename from tests/i04/callbacks/test_murko_callback.py rename to tests/unit_tests/beamlines/i04/callbacks/test_murko_callback.py index 2e160df15..5e3cd70e3 100644 --- a/tests/i04/callbacks/test_murko_callback.py +++ b/tests/unit_tests/beamlines/i04/callbacks/test_murko_callback.py @@ -4,7 +4,7 @@ import pytest from event_model import Event -from mx_bluesky.i04.callbacks.murko_callback import MurkoCallback +from mx_bluesky.beamlines.i04.callbacks.murko_callback import MurkoCallback test_oav_uuid = "UUID" test_smargon_data = 90 diff --git a/tests/i04/test_thawing.py b/tests/unit_tests/beamlines/i04/test_thawing.py similarity index 96% rename from tests/i04/test_thawing.py rename to tests/unit_tests/beamlines/i04/test_thawing.py index 7ae37c715..6030a0223 100644 --- a/tests/i04/test_thawing.py +++ b/tests/unit_tests/beamlines/i04/test_thawing.py @@ -18,7 +18,7 @@ ) from ophyd_async.epics.motion import Motor -from mx_bluesky.i04.thawing_plan import thaw, thaw_and_center +from mx_bluesky.beamlines.i04.thawing_plan import thaw, thaw_and_center DISPLAY_CONFIGURATION = "tests/devices/unit_tests/test_display.configuration" ZOOM_LEVELS_XML = "tests/devices/unit_tests/test_jCameraManZoomLevels.xml" @@ -157,7 +157,7 @@ def test_given_different_rotations_then_motor_moved_relative( ] -@patch("mx_bluesky.i04.thawing_plan.MurkoCallback") +@patch("mx_bluesky.beamlines.i04.thawing_plan.MurkoCallback") def test_thaw_and_centre_adds_murko_callback_and_produces_expected_messages( patch_murko_callback: MagicMock, smargon: Smargon, @@ -192,7 +192,7 @@ def 
test_thaw_and_centre_adds_murko_callback_and_produces_expected_messages( assert len(smargon_updates) > 0 -@patch("mx_bluesky.i04.thawing_plan.MurkoCallback.call_murko") +@patch("mx_bluesky.beamlines.i04.thawing_plan.MurkoCallback.call_murko") def test_thaw_and_centre_will_produce_events_that_call_murko( patch_murko_call: MagicMock, smargon: Smargon, diff --git a/tests/unit_tests/beamlines/i24/serial/__init__.py b/tests/unit_tests/beamlines/i24/serial/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/i24/serial/conftest.py b/tests/unit_tests/beamlines/i24/serial/conftest.py similarity index 100% rename from tests/i24/serial/conftest.py rename to tests/unit_tests/beamlines/i24/serial/conftest.py diff --git a/tests/unit_tests/beamlines/i24/serial/extruder/__init__.py b/tests/unit_tests/beamlines/i24/serial/extruder/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/i24/serial/extruder/test_extruder_collect.py b/tests/unit_tests/beamlines/i24/serial/extruder/test_extruder_collect.py similarity index 62% rename from tests/i24/serial/extruder/test_extruder_collect.py rename to tests/unit_tests/beamlines/i24/serial/extruder/test_extruder_collect.py index 087bdc12c..79e2fcdfd 100644 --- a/tests/i24/serial/extruder/test_extruder_collect.py +++ b/tests/unit_tests/beamlines/i24/serial/extruder/test_extruder_collect.py @@ -5,7 +5,7 @@ from dodal.devices.zebra import DISCONNECT, SOFT_IN3 from ophyd_async.core import get_mock_put -from mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2 import ( +from mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2 import ( TTL_EIGER, TTL_PILATUS, collection_aborted_plan, @@ -16,8 +16,8 @@ main_extruder_plan, tidy_up_at_collection_end_plan, ) -from mx_bluesky.i24.serial.parameters import ExtruderParameters -from mx_bluesky.i24.serial.setup_beamline import Eiger, Pilatus +from mx_bluesky.beamlines.i24.serial.parameters import ExtruderParameters +from mx_bluesky.beamlines.i24.serial.setup_beamline import Eiger, Pilatus @pytest.fixture @@ -57,11 +57,15 @@ def fake_generator(value): return value -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.get_detector_type") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.logger") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.get_detector_type" +) +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.logger") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging" +) def test_initialise_extruder( fake_log_setup, fake_log, @@ -78,7 +82,9 @@ def test_initialise_extruder( assert fake_caget.call_count == 1 -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging" +) async def test_enterhutch(fake_log_setup, detector_stage, RE): RE(enter_hutch(detector_stage)) assert await detector_stage.z.user_readback.get_value() == 1480 @@ -93,8 +99,12 @@ async def 
test_enterhutch(fake_log_setup, detector_stage, RE): ("laseroff", Pilatus(), "No", DISCONNECT), ], ) -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.get_detector_type") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.get_detector_type" +) +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging" +) async def test_laser_check( fake_log_setup, fake_det, @@ -114,17 +124,21 @@ async def test_laser_check( assert await zebra.output.out_pvs[TTL].get_value() == expected_out -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.call_nexgen") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sup") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.call_nexgen" +) +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sup") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_zebra_for_quickshot_plan" +) @patch( - "mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_zebra_for_quickshot_plan" + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging" ) -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.bps.rd") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.bps.rd") def test_run_extruder_quickshot_with_eiger( fake_read, fake_log_setup, @@ -171,16 +185,18 @@ def test_run_extruder_quickshot_with_eiger( mock_quickshot_plan.assert_called_once() -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sup") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sup") @patch( - "mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_zebra_for_extruder_with_pump_probe_plan" + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_zebra_for_extruder_with_pump_probe_plan" ) 
-@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.bps.rd") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.bps.rd") def test_run_extruder_pump_probe_with_pilatus( fake_read, fake_log_setup, @@ -231,12 +247,12 @@ def test_run_extruder_pump_probe_with_pilatus( @patch( - "mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.reset_zebra_when_collection_done_plan" + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.reset_zebra_when_collection_done_plan" ) -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sup") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caget") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sup") def test_tidy_up_at_collection_end_plan_with_eiger( fake_sup, fake_caget, @@ -260,10 +276,12 @@ def test_tidy_up_at_collection_end_plan_with_eiger( fake_sup.eiger.assert_called_once_with("return-to-normal", None) -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.disarm_zebra") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") +@patch( + "mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.disarm_zebra" +) def test_aborted_plan_with_pilatus( mock_disarm, fake_dcid, fake_caput, fake_sleep, RE, zebra ): @@ -274,10 +292,10 @@ def test_aborted_plan_with_pilatus( fake_dcid.collection_complete.assert_called_once_with(ANY, aborted=True) -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.shutil") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.shutil") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.DCID") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.sleep") +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.caput") def test_collection_complete_plan_with_eiger( fake_caput, fake_sleep, fake_dcid, fake_shutil, dummy_params, RE ): diff --git a/tests/unit_tests/beamlines/i24/serial/fixed_target/__init__.py b/tests/unit_tests/beamlines/i24/serial/fixed_target/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/i24/serial/fixed_target/conftest.py 
b/tests/unit_tests/beamlines/i24/serial/fixed_target/conftest.py similarity index 74% rename from tests/i24/serial/fixed_target/conftest.py rename to tests/unit_tests/beamlines/i24/serial/fixed_target/conftest.py index 64501fd2a..84a7d145a 100644 --- a/tests/i24/serial/fixed_target/conftest.py +++ b/tests/unit_tests/beamlines/i24/serial/fixed_target/conftest.py @@ -1,7 +1,10 @@ import pytest -from mx_bluesky.i24.serial.fixed_target.ft_utils import ChipType -from mx_bluesky.i24.serial.parameters import FixedTargetParameters, get_chip_format +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ChipType +from mx_bluesky.beamlines.i24.serial.parameters import ( + FixedTargetParameters, + get_chip_format, +) @pytest.fixture diff --git a/tests/i24/serial/fixed_target/test_chip_manager.py b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_chip_manager.py similarity index 67% rename from tests/i24/serial/fixed_target/test_chip_manager.py rename to tests/unit_tests/beamlines/i24/serial/fixed_target/test_chip_manager.py index 57d1a152d..e29bbf795 100644 --- a/tests/i24/serial/fixed_target/test_chip_manager.py +++ b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_chip_manager.py @@ -8,8 +8,8 @@ from dodal.devices.i24.pmac import PMAC from ophyd_async.core import get_mock_put -from mx_bluesky.i24.serial.fixed_target.ft_utils import Fiducials -from mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1 import ( +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import Fiducials +from mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1 import ( cs_maker, cs_reset, initialise_stages, @@ -35,10 +35,14 @@ cs_json = '{"scalex":1, "scaley":2, "scalez":3, "skew":-0.5, "Sx_dir":1, "Sy_dir":-1, "Sz_dir":0}' -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.sys") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.get_detector_type") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.sys") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.get_detector_type" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") async def test_initialise( fake_caput: MagicMock, fake_det: MagicMock, @@ -65,8 +69,10 @@ async def test_initialise( ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") async def test_moveto_oxford_origin( fake_caget: MagicMock, fake_log: MagicMock, pmac: PMAC, RE ): @@ -77,8 +83,10 @@ async def test_moveto_oxford_origin( assert await pmac.y.user_readback.get_value() == 0.0 -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") async def 
test_moveto_oxford_inner_f1( fake_caget: MagicMock, fake_log: MagicMock, pmac: PMAC, RE ): @@ -89,14 +97,18 @@ async def test_moveto_oxford_inner_f1( assert await pmac.y.user_readback.get_value() == 0.0 -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) async def test_moveto_chip_aspecific(fake_log: MagicMock, pmac: PMAC, RE): RE(moveto("zero", pmac)) assert await pmac.pmac_string.get_value() == "!x0y0z0" -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") async def test_moveto_preset( fake_caput: MagicMock, fake_log: MagicMock, @@ -122,8 +134,10 @@ async def test_moveto_preset( ("microdrop_position", 0, [6.0, -7.8, 0.0], False), ], ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") async def test_moveto_preset_with_pmac_move( fake_caput: MagicMock, fake_log: MagicMock, @@ -158,7 +172,9 @@ async def test_moveto_preset_with_pmac_move( ("laser2off", " M812=0 M811=1"), ], ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) async def test_laser_control_on_and_off( fake_log: MagicMock, laser_setting: str, expected_pmac_string: str, pmac: PMAC, RE ): @@ -167,9 +183,13 @@ async def test_laser_control_on_and_off( assert await pmac.pmac_string.get_value() == expected_pmac_string -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.bps.sleep") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.bps.sleep" +) def test_laser_control_burn_setting( fake_sleep: MagicMock, fake_caget: MagicMock, fake_log: MagicMock, pmac: PMAC, RE ): @@ -187,7 +207,7 @@ def test_laser_control_burn_setting( @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", mock_open(read_data=mtr_dir_str), ) def test_scrape_mtr_directions(): @@ -197,7 +217,7 @@ def test_scrape_mtr_directions(): @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", mock_open(read_data=fiducial_1_str), ) def test_scrape_mtr_fiducials(): @@ -228,9 +248,11 @@ def test_cs_pmac_str_set(pmac: PMAC, RE): ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.set_pmac_strings_for_cs" + 
"mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.set_pmac_strings_for_cs" ) def test_cs_reset(mock_set_pmac_str: MagicMock, fake_log: MagicMock, pmac: PMAC, RE): RE(cs_reset(pmac)) @@ -238,17 +260,19 @@ def test_cs_reset(mock_set_pmac_str: MagicMock, fake_log: MagicMock, pmac: PMAC, @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", mock_open(read_data='{"a":11, "b":12,}'), ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_directions" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_directions" ) @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_fiducials" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_fiducials" +) +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") def test_cs_maker_raises_error_for_invalid_json( fake_log: MagicMock, fake_fid: MagicMock, @@ -264,17 +288,19 @@ def test_cs_maker_raises_error_for_invalid_json( @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", mock_open(read_data='{"scalex":11, "skew":12}'), ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_directions" +) @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_directions" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_fiducials" ) @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_fiducials" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") def test_cs_maker_raises_error_for_missing_key_in_json( fake_log: MagicMock, fake_fid: MagicMock, @@ -290,17 +316,19 @@ def test_cs_maker_raises_error_for_missing_key_in_json( @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.open", mock_open(read_data=cs_json), ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_directions" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_directions" ) @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_fiducials" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.scrape_mtr_fiducials" +) +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" ) 
-@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") def test_cs_maker_raises_error_for_wrong_direction_in_json( fake_log: MagicMock, fake_fid: MagicMock, @@ -315,9 +343,11 @@ def test_cs_maker_raises_error_for_wrong_direction_in_json( RE(cs_maker(pmac)) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.setup_logging" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caput") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Manager_py3v1.caget") def test_pumpprobe_calc( fake_caget: MagicMock, fake_caput: MagicMock, fake_log: MagicMock, RE ): diff --git a/tests/i24/serial/fixed_target/test_chip_startup.py b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_chip_startup.py similarity index 78% rename from tests/i24/serial/fixed_target/test_chip_startup.py rename to tests/unit_tests/beamlines/i24/serial/fixed_target/test_chip_startup.py index 77e38c666..9e02cf4e7 100644 --- a/tests/i24/serial/fixed_target/test_chip_startup.py +++ b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_chip_startup.py @@ -2,7 +2,7 @@ import pytest -from mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1 import ( +from mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1 import ( check_files, fiducials, pathli, @@ -15,9 +15,9 @@ def test_fiducials(): assert fiducials(2) is None -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1.os") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1.os") @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1.read_parameter_file" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_StartUp_py3v1.read_parameter_file" ) def test_check_files(fake_read_params, mock_os, dummy_params_without_pp): fake_read_params.return_value = dummy_params_without_pp diff --git a/tests/i24/serial/fixed_target/test_ft_collect.py b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_ft_collect.py similarity index 76% rename from tests/i24/serial/fixed_target/test_ft_collect.py rename to tests/unit_tests/beamlines/i24/serial/fixed_target/test_ft_collect.py index 16736cc04..618d707d5 100644 --- a/tests/i24/serial/fixed_target/test_ft_collect.py +++ b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_ft_collect.py @@ -7,8 +7,8 @@ from dodal.devices.zebra import Zebra from ophyd_async.core import get_mock_put -from mx_bluesky.i24.serial.fixed_target.ft_utils import MappingType -from mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1 import ( +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import MappingType +from mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1 import ( datasetsizei24, finish_i24, get_chip_prog_values, @@ -30,12 +30,12 @@ def fake_generator(value): return value -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caput") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caput") def test_datasetsizei24_for_one_block_and_two_exposures( fake_caput, dummy_params_without_pp ): with patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.open", + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.open", 
mock_open(read_data=chipmap_str), ): tot_num_imgs = datasetsizei24(2, dummy_params_without_pp.chip, MappingType.Lite) @@ -110,11 +110,11 @@ def test_load_motion_program_data( mock_pmac_str.assert_has_calls(call_list) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.DCID") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caput") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caget") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sup") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sleep") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.DCID") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caput") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caget") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sup") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sleep") def test_start_i24_with_eiger( fake_sleep, fake_sup, @@ -158,15 +158,19 @@ def test_start_i24_with_eiger( mock_shutter.assert_has_calls(shutter_call_list) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.write_userlog") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sleep") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.cagetstring") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caget") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sup") @patch( - "mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.reset_zebra_when_collection_done_plan" + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.write_userlog" ) -@patch("mx_bluesky.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.bps.rd") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sleep") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.cagetstring" +) +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caget") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sup") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.reset_zebra_when_collection_done_plan" +) +@patch("mx_bluesky.beamlines.i24.serial.extruder.i24ssx_Extruder_Collect_py3v2.bps.rd") def test_finish_i24( fake_read, fake_reset_zebra, @@ -200,7 +204,7 @@ def test_finish_i24( fake_userlog.assert_called_once_with(dummy_params_without_pp, "chip_01", 0.0, 0.6) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.DCID") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.DCID") def test_run_aborted_plan(fake_dcid: MagicMock, pmac: PMAC, RE): RE(run_aborted_plan(pmac, fake_dcid)) @@ -213,10 +217,12 @@ def test_run_aborted_plan(fake_dcid: MagicMock, pmac: PMAC, RE): fake_dcid.collection_complete.assert_called_once_with(ANY, aborted=True) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.finish_i24") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sleep") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.DCID") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caput") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.finish_i24" +) 
+@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.sleep") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.DCID") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_Chip_Collect_py3v1.caput") async def test_tidy_up_after_collection_plan( fake_caput, fake_dcid, diff --git a/tests/i24/serial/fixed_target/test_moveonclick.py b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_moveonclick.py similarity index 83% rename from tests/i24/serial/fixed_target/test_moveonclick.py rename to tests/unit_tests/beamlines/i24/serial/fixed_target/test_moveonclick.py index b20b8e8d4..b91c477cf 100644 --- a/tests/i24/serial/fixed_target/test_moveonclick.py +++ b/tests/unit_tests/beamlines/i24/serial/fixed_target/test_moveonclick.py @@ -6,7 +6,7 @@ from dodal.devices.oav.oav_detector import OAV from ophyd_async.core import get_mock_put -from mx_bluesky.i24.serial.fixed_target.i24ssx_moveonclick import ( +from mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_moveonclick import ( onMouse, update_ui, zoomcalibrator, @@ -33,7 +33,9 @@ ), ], ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_moveonclick._get_beam_centre") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_moveonclick._get_beam_centre" +) def test_onMouse_gets_beam_position_and_sends_correct_str( fake_get_beam_pos: MagicMock, beam_position: tuple, @@ -54,8 +56,10 @@ def test_onMouse_gets_beam_position_and_sends_correct_str( ) -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_moveonclick.cv") -@patch("mx_bluesky.i24.serial.fixed_target.i24ssx_moveonclick._get_beam_centre") +@patch("mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_moveonclick.cv") +@patch( + "mx_bluesky.beamlines.i24.serial.fixed_target.i24ssx_moveonclick._get_beam_centre" +) def test_update_ui_uses_correct_beam_centre_for_ellipse(fake_beam_pos, fake_cv): mock_frame = MagicMock() mock_oav = MagicMock() diff --git a/tests/i24/serial/parameters/test_utils.py b/tests/unit_tests/beamlines/i24/serial/parameters/test_utils.py similarity index 84% rename from tests/i24/serial/parameters/test_utils.py rename to tests/unit_tests/beamlines/i24/serial/parameters/test_utils.py index 7306b428c..a0690dd79 100644 --- a/tests/i24/serial/parameters/test_utils.py +++ b/tests/unit_tests/beamlines/i24/serial/parameters/test_utils.py @@ -2,8 +2,8 @@ import pytest -from mx_bluesky.i24.serial.fixed_target.ft_utils import ChipType -from mx_bluesky.i24.serial.parameters import get_chip_format +from mx_bluesky.beamlines.i24.serial.fixed_target.ft_utils import ChipType +from mx_bluesky.beamlines.i24.serial.parameters import get_chip_format @pytest.mark.parametrize( @@ -26,7 +26,7 @@ def test_get_chip_format_for_oxford_chips( assert test_defaults["x_step_size"] == expected_step_size -@patch("mx_bluesky.i24.serial.parameters.utils.caget") +@patch("mx_bluesky.beamlines.i24.serial.parameters.utils.caget") def test_get_chip_format_for_custom_chips(fake_caget: MagicMock): fake_caget.side_effect = ["10", "2", "0.2", "0.2"] test_chip_type = ChipType(2) diff --git a/tests/unit_tests/beamlines/i24/serial/setup_beamline/__init__.py b/tests/unit_tests/beamlines/i24/serial/setup_beamline/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/i24/serial/setup_beamline/test_pv_abstract.py b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_pv_abstract.py similarity index 70% rename from tests/i24/serial/setup_beamline/test_pv_abstract.py rename to 
tests/unit_tests/beamlines/i24/serial/setup_beamline/test_pv_abstract.py index 1ee233063..f469d0c01 100644 --- a/tests/i24/serial/setup_beamline/test_pv_abstract.py +++ b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_pv_abstract.py @@ -1,4 +1,4 @@ -from mx_bluesky.i24.serial.setup_beamline import Eiger, Pilatus +from mx_bluesky.beamlines.i24.serial.setup_beamline import Eiger, Pilatus def test_eiger(): diff --git a/tests/i24/serial/setup_beamline/test_setup_beamline.py b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_setup_beamline.py similarity index 70% rename from tests/i24/serial/setup_beamline/test_setup_beamline.py rename to tests/unit_tests/beamlines/i24/serial/setup_beamline/test_setup_beamline.py index 93b2b9564..c39085ddb 100644 --- a/tests/i24/serial/setup_beamline/test_setup_beamline.py +++ b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_setup_beamline.py @@ -6,7 +6,7 @@ from dodal.devices.i24.dual_backlight import DualBacklight from dodal.devices.i24.i24_detector_motion import DetectorMotion -from mx_bluesky.i24.serial.setup_beamline import setup_beamline +from mx_bluesky.beamlines.i24.serial.setup_beamline import setup_beamline async def test_setup_beamline_for_collection_plan( @@ -28,30 +28,30 @@ async def test_move_detector_stage_to_position_plan(detector_stage: DetectorMoti assert await detector_stage.z.user_readback.get_value() == det_dist -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caput") -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caget") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caput") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caget") def test_pilatus_raises_error_if_fastchip_and_no_args_list(fake_caget, fake_caput): with pytest.raises(TypeError): setup_beamline.pilatus("fastchip", None) -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caput") -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caget") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caput") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caget") def test_pilatus_quickshot(fake_caget, fake_caput): setup_beamline.pilatus("quickshot", ["", "", 1, 0.1]) assert fake_caput.call_count == 12 assert fake_caget.call_count == 2 -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caput") -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caget") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caput") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caget") def test_eiger_raises_error_if_quickshot_and_no_args_list(fake_caget, fake_caput): with pytest.raises(TypeError): setup_beamline.eiger("quickshot", None) -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caput") -@patch("mx_bluesky.i24.serial.setup_beamline.setup_beamline.caget") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caput") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_beamline.caget") def test_eiger_quickshot(fake_caget, fake_caput): setup_beamline.eiger("quickshot", ["", "", "1", "0.1"]) assert fake_caput.call_count == 32 diff --git a/tests/i24/serial/setup_beamline/test_setup_detector.py b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_setup_detector.py similarity index 75% rename from tests/i24/serial/setup_beamline/test_setup_detector.py rename to tests/unit_tests/beamlines/i24/serial/setup_beamline/test_setup_detector.py index 
fff211c86..0fd048653 100644 --- a/tests/i24/serial/setup_beamline/test_setup_detector.py +++ b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_setup_detector.py @@ -5,9 +5,9 @@ from dodal.devices.i24.i24_detector_motion import DetectorMotion from ophyd_async.core import set_mock_value -from mx_bluesky.i24.serial.parameters.constants import SSXType -from mx_bluesky.i24.serial.setup_beamline import Eiger, Pilatus -from mx_bluesky.i24.serial.setup_beamline.setup_detector import ( +from mx_bluesky.beamlines.i24.serial.parameters.constants import SSXType +from mx_bluesky.beamlines.i24.serial.setup_beamline import Eiger, Pilatus +from mx_bluesky.beamlines.i24.serial.setup_beamline.setup_detector import ( DetRequest, _get_requested_detector, get_detector_type, @@ -27,7 +27,7 @@ def test_get_detector_type_finds_pilatus(RE, detector_stage: DetectorMotion): assert det_type.name == "pilatus" -@patch("mx_bluesky.i24.serial.setup_beamline.setup_detector.caget") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_detector.caget") def test_get_requested_detector(fake_caget): fake_caget.return_value = "pilatus" assert _get_requested_detector("some_pv") == Pilatus.name @@ -36,15 +36,15 @@ def test_get_requested_detector(fake_caget): assert _get_requested_detector("some_pv") == Eiger.name -@patch("mx_bluesky.i24.serial.setup_beamline.setup_detector.caget") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_detector.caget") def test_get_requested_detector_raises_error_for_invalid_value(fake_caget): fake_caget.return_value = "something" with pytest.raises(ValueError): _get_requested_detector("some_pv") -@patch("mx_bluesky.i24.serial.setup_beamline.setup_detector.setup_logging") -@patch("mx_bluesky.i24.serial.setup_beamline.setup_detector.caget") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_detector.setup_logging") +@patch("mx_bluesky.beamlines.i24.serial.setup_beamline.setup_detector.caget") async def test_setup_detector_stage( fake_caget, fake_log, detector_stage: DetectorMotion, RE: RunEngine ): diff --git a/tests/i24/serial/setup_beamline/test_zebra_plans.py b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_zebra_plans.py similarity index 98% rename from tests/i24/serial/setup_beamline/test_zebra_plans.py rename to tests/unit_tests/beamlines/i24/serial/setup_beamline/test_zebra_plans.py index a11ae5703..6a944fb9f 100644 --- a/tests/i24/serial/setup_beamline/test_zebra_plans.py +++ b/tests/unit_tests/beamlines/i24/serial/setup_beamline/test_zebra_plans.py @@ -11,7 +11,7 @@ Zebra, ) -from mx_bluesky.i24.serial.setup_beamline.setup_zebra_plans import ( +from mx_bluesky.beamlines.i24.serial.setup_beamline.setup_zebra_plans import ( arm_zebra, disarm_zebra, get_zebra_settings_for_extruder, diff --git a/tests/i24/serial/test_dcid.py b/tests/unit_tests/beamlines/i24/serial/test_dcid.py similarity index 68% rename from tests/i24/serial/test_dcid.py rename to tests/unit_tests/beamlines/i24/serial/test_dcid.py index e7f2ed3a4..1a4c1fe36 100644 --- a/tests/i24/serial/test_dcid.py +++ b/tests/unit_tests/beamlines/i24/serial/test_dcid.py @@ -1,17 +1,21 @@ from unittest.mock import patch -from mx_bluesky.i24.serial.dcid import get_beam_center, get_beamsize, get_resolution -from mx_bluesky.i24.serial.setup_beamline import Eiger, Pilatus +from mx_bluesky.beamlines.i24.serial.dcid import ( + get_beam_center, + get_beamsize, + get_resolution, +) +from mx_bluesky.beamlines.i24.serial.setup_beamline import Eiger, Pilatus -@patch("mx_bluesky.i24.serial.dcid.caget") 
+@patch("mx_bluesky.beamlines.i24.serial.dcid.caget") def test_beamsize(fake_caget): beam_size = get_beamsize() assert type(beam_size) is tuple assert fake_caget.call_count == 2 -@patch("mx_bluesky.i24.serial.dcid.caget") +@patch("mx_bluesky.beamlines.i24.serial.dcid.caget") def test_beam_center(fake_caget): beam_center = get_beam_center(Eiger()) assert type(beam_center) is tuple diff --git a/tests/i24/serial/test_log.py b/tests/unit_tests/beamlines/i24/serial/test_log.py similarity index 74% rename from tests/i24/serial/test_log.py rename to tests/unit_tests/beamlines/i24/serial/test_log.py index ac53d1aa0..bb7a15c25 100644 --- a/tests/i24/serial/test_log.py +++ b/tests/unit_tests/beamlines/i24/serial/test_log.py @@ -4,7 +4,7 @@ import pytest -from mx_bluesky.i24.serial import log +from mx_bluesky.beamlines.i24.serial import log @pytest.fixture @@ -19,8 +19,8 @@ def _destroy_handlers(logger): logger.handlers.clear() -@patch("mx_bluesky.i24.serial.log.environ") -@patch("mx_bluesky.i24.serial.log.Path.mkdir") +@patch("mx_bluesky.beamlines.i24.serial.log.environ") +@patch("mx_bluesky.beamlines.i24.serial.log.Path.mkdir") def test_logging_file_path(mock_dir, mock_environ): mock_environ.get.return_value = None log_path = log._get_logging_file_path() @@ -28,9 +28,9 @@ def test_logging_file_path(mock_dir, mock_environ): assert log_path.as_posix() == "tmp/logs" -@patch("mx_bluesky.i24.serial.log._read_visit_directory_from_file") -@patch("mx_bluesky.i24.serial.log.environ") -@patch("mx_bluesky.i24.serial.log.Path.mkdir") +@patch("mx_bluesky.beamlines.i24.serial.log._read_visit_directory_from_file") +@patch("mx_bluesky.beamlines.i24.serial.log.environ") +@patch("mx_bluesky.beamlines.i24.serial.log.Path.mkdir") def test_logging_file_path_on_beamline(mock_dir, mock_environ, mock_visit): mock_environ.get.return_value = "i24" mock_visit.return_value = Path("/path/to/i24/data") @@ -45,17 +45,17 @@ def test_basic_logging_config(dummy_logger): assert dummy_logger.handlers[0].level == logging.DEBUG -@patch("mx_bluesky.i24.serial.log.integrate_bluesky_and_ophyd_logging") +@patch("mx_bluesky.beamlines.i24.serial.log.integrate_bluesky_and_ophyd_logging") def test_default_logging_setup_removes_dodal_stream(mock_blusky_ophyd_logs): - with patch("mx_bluesky.i24.serial.log.dodal_logger") as mock_dodal_logger: + with patch("mx_bluesky.beamlines.i24.serial.log.dodal_logger") as mock_dodal_logger: log.default_logging_setup(dev_mode=True) mock_blusky_ophyd_logs.assert_called_once() assert mock_dodal_logger.addHandler.call_count == 4 mock_dodal_logger.removeHandler.assert_called_once() -@patch("mx_bluesky.i24.serial.log.Path.mkdir") -@patch("mx_bluesky.i24.serial.log.default_logging_setup") +@patch("mx_bluesky.beamlines.i24.serial.log.Path.mkdir") +@patch("mx_bluesky.beamlines.i24.serial.log.default_logging_setup") def test_logging_config_with_filehandler(mock_default, mock_dir, dummy_logger): # dodal handlers mocked out log.config("dummy.log", delayed=True, dev_mode=True) diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py new file mode 100644 index 000000000..cee8cdfa7 --- /dev/null +++ b/tests/unit_tests/conftest.py @@ -0,0 +1,31 @@ +import asyncio +import os +import time + +import pytest +from bluesky.run_engine import RunEngine + +# Prevent pytest from catching exceptions when debugging in vscode so that break on +# exception works correctly (see: https://github.com/pytest-dev/pytest/issues/7409) +if os.getenv("PYTEST_RAISE", "0") == "1": + + @pytest.hookimpl(tryfirst=True) + def 
pytest_exception_interact(call): + raise call.excinfo.value + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value + + +@pytest.fixture +async def RE(): + RE = RunEngine(call_returns_result=True) + # make sure the event loop is thoroughly up and running before we try to create + # any ophyd_async devices which might need it + timeout = time.monotonic() + 1 + while not RE.loop.is_running(): + await asyncio.sleep(0) + if time.monotonic() > timeout: + raise TimeoutError("This really shouldn't happen but just in case...") + yield RE diff --git a/tests/unit_tests/hyperion/__init__.py b/tests/unit_tests/hyperion/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/conftest.py b/tests/unit_tests/hyperion/conftest.py new file mode 100644 index 000000000..1c34d0f88 --- /dev/null +++ b/tests/unit_tests/hyperion/conftest.py @@ -0,0 +1,80 @@ +from pathlib import Path +from unittest.mock import patch + +import pytest +from event_model import Event, EventDescriptor + +from mx_bluesky.hyperion.parameters.constants import CONST + +BANNED_PATHS = [Path("/dls"), Path("/dls_sw")] + + +@pytest.fixture(autouse=True) +def patch_open_to_prevent_dls_reads_in_tests(): + unpatched_open = open + + def patched_open(*args, **kwargs): + requested_path = Path(args[0]) + if requested_path.is_absolute(): + for p in BANNED_PATHS: + assert not requested_path.is_relative_to( + p + ), f"Attempt to open {requested_path} from inside a unit test" + return unpatched_open(*args, **kwargs) + + with patch("builtins.open", side_effect=patched_open): + yield [] + + +class OavGridSnapshotTestEvents: + test_descriptor_document_oav_snapshot: EventDescriptor = { + "uid": "b5ba4aec-de49-4970-81a4-b4a847391d34", + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "name": CONST.DESCRIPTORS.OAV_GRID_SNAPSHOT_TRIGGERED, + } # type: ignore + test_event_document_oav_snapshot_xy: Event = { + "descriptor": "b5ba4aec-de49-4970-81a4-b4a847391d34", + "time": 1666604299.828203, + "timestamps": {}, + "seq_num": 1, + "uid": "29033ecf-e052-43dd-98af-c7cdd62e8174", + "data": { + "oav_grid_snapshot_top_left_x": 50, + "oav_grid_snapshot_top_left_y": 100, + "oav_grid_snapshot_num_boxes_x": 40, + "oav_grid_snapshot_num_boxes_y": 20, + "oav_grid_snapshot_microns_per_pixel_x": 1.25, + "oav_grid_snapshot_microns_per_pixel_y": 1.5, + "oav_grid_snapshot_box_width": 0.1 * 1000 / 1.25, # size in pixels + "oav_grid_snapshot_last_path_full_overlay": "test_1_y", + "oav_grid_snapshot_last_path_outer": "test_2_y", + "oav_grid_snapshot_last_saved_path": "test_3_y", + "smargon-omega": 0, + "smargon-x": 0, + "smargon-y": 0, + "smargon-z": 0, + }, + } + test_event_document_oav_snapshot_xz: Event = { + "descriptor": "b5ba4aec-de49-4970-81a4-b4a847391d34", + "time": 1666604299.828203, + "timestamps": {}, + "seq_num": 1, + "uid": "29033ecf-e052-43dd-98af-c7cdd62e8174", + "data": { + "oav_grid_snapshot_top_left_x": 50, + "oav_grid_snapshot_top_left_y": 0, + "oav_grid_snapshot_num_boxes_x": 40, + "oav_grid_snapshot_num_boxes_y": 10, + "oav_grid_snapshot_box_width": 0.1 * 1000 / 1.25, # size in pixels + "oav_grid_snapshot_last_path_full_overlay": "test_1_z", + "oav_grid_snapshot_last_path_outer": "test_2_z", + "oav_grid_snapshot_last_saved_path": "test_3_z", + "oav_grid_snapshot_microns_per_pixel_x": 1.25, + "oav_grid_snapshot_microns_per_pixel_y": 1.5, + "smargon-omega": -90, + "smargon-x": 0, + "smargon-y": 0, + "smargon-z": 0, + }, + } diff --git 
a/tests/unit_tests/hyperion/device_setup_plans/__init__.py b/tests/unit_tests/hyperion/device_setup_plans/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/device_setup_plans/test_dcm_pitch_roll_mirror_adjuster.py b/tests/unit_tests/hyperion/device_setup_plans/test_dcm_pitch_roll_mirror_adjuster.py new file mode 100644 index 000000000..7c17b7ac8 --- /dev/null +++ b/tests/unit_tests/hyperion/device_setup_plans/test_dcm_pitch_roll_mirror_adjuster.py @@ -0,0 +1,244 @@ +from threading import Timer +from unittest.mock import MagicMock, PropertyMock, patch + +import pytest +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator, assert_message_and_return_remaining +from dodal.common.beamlines.beamline_parameters import GDABeamlineParameters +from dodal.devices.focusing_mirror import ( + FocusingMirrorWithStripes, + MirrorStripe, + MirrorVoltageDemand, + VFMMirrorVoltages, +) +from dodal.devices.undulator_dcm import UndulatorDCM +from ophyd import EpicsSignal +from ophyd.sim import NullStatus +from ophyd.status import Status + +from mx_bluesky.hyperion.device_setup_plans import dcm_pitch_roll_mirror_adjuster +from mx_bluesky.hyperion.device_setup_plans.dcm_pitch_roll_mirror_adjuster import ( + adjust_dcm_pitch_roll_vfm_from_lut, + adjust_mirror_stripe, +) + + +def test_apply_and_wait_for_voltages_to_settle_happy_path( + RE: RunEngine, + vfm_mirror_voltages: VFMMirrorVoltages, + vfm: FocusingMirrorWithStripes, +): + with patch.object( + vfm_mirror_voltages, + "voltage_channels", + new_callable=_all_demands_accepted(vfm_mirror_voltages), + ): + RE( + dcm_pitch_roll_mirror_adjuster._apply_and_wait_for_voltages_to_settle( + MirrorStripe.BARE, vfm, vfm_mirror_voltages + ) + ) + + for channel, expected_voltage in zip( + vfm_mirror_voltages.voltage_channels.values(), + [140, 100, 70, 30, 30, -65, 24, 15], + strict=False, + ): + channel.set.assert_called_once_with(expected_voltage) # type: ignore + + +def _mock_channel(magic_mock, accept_demand): + def not_ok_then_ok(new_value): + if accept_demand: + status = Status() + Timer(0.2, lambda: status.set_finished()).start() + else: + status = Status(timeout=0.2) + return status + + magic_mock.set.side_effect = not_ok_then_ok + return magic_mock + + +def _all_demands_accepted(vfm_mirror_voltages): + mock_channels = { + i: _mock_channel(MagicMock(), True) + for i in vfm_mirror_voltages.voltage_channels.keys() + } + + voltage_channels = PropertyMock() + voltage_channels.return_value = mock_channels + return voltage_channels + + +def _one_demand_not_accepted(vfm_mirror_voltages): + mock_channels = { + i: _mock_channel(MagicMock(), i != 0) + for i in vfm_mirror_voltages.voltage_channels.keys() + } + + voltage_channels = PropertyMock() + voltage_channels.return_value = mock_channels + return voltage_channels + + +@patch("dodal.devices.focusing_mirror.DEFAULT_SETTLE_TIME_S", 3) +def test_apply_and_wait_for_voltages_to_settle_timeout( + RE: RunEngine, + vfm_mirror_voltages: VFMMirrorVoltages, + vfm: FocusingMirrorWithStripes, +): + with patch.object( + vfm_mirror_voltages, + "voltage_channels", + new_callable=_one_demand_not_accepted(vfm_mirror_voltages), + ): + actual_exception = None + + try: + RE( + dcm_pitch_roll_mirror_adjuster._apply_and_wait_for_voltages_to_settle( + MirrorStripe.BARE, vfm, vfm_mirror_voltages + ) + ) + except Exception as e: + actual_exception = e + + assert actual_exception is not None + # Check that all voltages set in parallel + for channel, 
expected_voltage in zip( + vfm_mirror_voltages.voltage_channels.values(), + [140, 100, 70, 30, 30, -65, 24, 15], + strict=False, + ): + channel.set.assert_called_once_with(expected_voltage) # type: ignore + + +def _mock_voltage_channel(setpoint: EpicsSignal, demand_accepted: EpicsSignal): + def set_demand_and_return_ok(_): + demand_accepted.sim_put(MirrorVoltageDemand.OK) # type: ignore + return NullStatus() + + setpoint.set = MagicMock(side_effect=set_demand_and_return_ok) + + +mirror_stripe_params = [ + (6.999, "Bare", 140, 15), + (7.001, "Rhodium", 124, -46), +] + + +@pytest.mark.parametrize( + "energy_kev, expected_stripe, first_voltage, last_voltage", mirror_stripe_params +) +def test_adjust_mirror_stripe( + RE: RunEngine, + vfm_mirror_voltages: VFMMirrorVoltages, + vfm: FocusingMirrorWithStripes, + energy_kev, + expected_stripe, + first_voltage, + last_voltage, +): + with patch.object( + vfm_mirror_voltages, + "voltage_channels", + new_callable=_all_demands_accepted(vfm_mirror_voltages), + ): + vfm.stripe.set = MagicMock(return_value=NullStatus()) + vfm.apply_stripe.trigger = MagicMock() # type: ignore + parent = MagicMock() + parent.attach_mock(vfm.stripe.set, "stripe_set") + parent.attach_mock(vfm.apply_stripe.trigger, "apply_stripe") # type: ignore + + RE(adjust_mirror_stripe(energy_kev, vfm, vfm_mirror_voltages)) + + assert parent.method_calls[0] == ("stripe_set", (expected_stripe,)) + assert parent.method_calls[1][0] == "apply_stripe" + vfm_mirror_voltages.voltage_channels[0].set.assert_called_once_with( # type: ignore + first_voltage + ) + vfm_mirror_voltages.voltage_channels[7].set.assert_called_once_with( # type: ignore + last_voltage + ) + + +def test_adjust_dcm_pitch_roll_vfm_from_lut( + undulator_dcm: UndulatorDCM, + vfm: FocusingMirrorWithStripes, + vfm_mirror_voltages: VFMMirrorVoltages, + beamline_parameters: GDABeamlineParameters, + sim_run_engine: RunEngineSimulator, +): + sim_run_engine.add_handler_for_callback_subscribes() + sim_run_engine.add_handler( + "read", + lambda msg: {"dcm-bragg_in_degrees": {"value": 5.0}}, + "dcm-bragg_in_degrees", + ) + + messages = sim_run_engine.simulate_plan( + adjust_dcm_pitch_roll_vfm_from_lut(undulator_dcm, vfm, vfm_mirror_voltages, 7.5) + ) + + messages = assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "set" + and msg.obj.name == "dcm-pitch_in_mrad" + and abs(msg.args[0] - -0.75859) < 1e-5 + and msg.kwargs["group"] == "DCM_GROUP", + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "dcm-roll_in_mrad" + and abs(msg.args[0] - 4.0) < 1e-5 + and msg.kwargs["group"] == "DCM_GROUP", + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "dcm-offset_in_mm" + and msg.args == (25.6,) + and msg.kwargs["group"] == "DCM_GROUP", + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "vfm-stripe" + and msg.args == (MirrorStripe.RHODIUM,), + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "wait", + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "trigger" and msg.obj.name == "vfm-apply_stripe", + ) + for channel, expected_voltage in ( + (0, 124), + (1, 114), + (2, 34), + (3, 49), + (4, 19), + (5, -116), + (6, 4), + (7, -46), + ): + messages = assert_message_and_return_remaining( + messages[1:], + lambda 
msg: msg.command == "set" + and msg.obj.name == f"vfm_mirror_voltages-voltage_channels-{channel}" + and msg.args == (expected_voltage,), + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "wait" and msg.kwargs["group"] == "DCM_GROUP", + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "vfm-x_mm" + and msg.args == (10.0,), + ) diff --git a/tests/unit_tests/hyperion/device_setup_plans/test_manipulate_sample.py b/tests/unit_tests/hyperion/device_setup_plans/test_manipulate_sample.py new file mode 100644 index 000000000..eaa9152f9 --- /dev/null +++ b/tests/unit_tests/hyperion/device_setup_plans/test_manipulate_sample.py @@ -0,0 +1,34 @@ +from unittest.mock import patch + +from bluesky.run_engine import RunEngine +from dodal.devices.aperturescatterguard import ( + AperturePosition, + AperturePositionGDANames, + ApertureScatterguard, +) + +from mx_bluesky.hyperion.device_setup_plans.manipulate_sample import ( + move_aperture_if_required, +) + + +async def test_move_aperture_goes_to_correct_position( + aperture_scatterguard: ApertureScatterguard, RE: RunEngine +): + with patch.object(aperture_scatterguard, "set") as mock_set: + RE( + move_aperture_if_required( + aperture_scatterguard, AperturePositionGDANames.LARGE_APERTURE + ) + ) + mock_set.assert_called_once_with( + AperturePosition.LARGE, + ) + + +async def test_move_aperture_does_nothing_when_none_selected( + aperture_scatterguard: ApertureScatterguard, RE: RunEngine +): + with patch.object(aperture_scatterguard, "set") as mock_set: + RE(move_aperture_if_required(aperture_scatterguard, None)) + mock_set.assert_not_called() diff --git a/tests/unit_tests/hyperion/device_setup_plans/test_setup_oav.py b/tests/unit_tests/hyperion/device_setup_plans/test_setup_oav.py new file mode 100644 index 000000000..7f4474fa1 --- /dev/null +++ b/tests/unit_tests/hyperion/device_setup_plans/test_setup_oav.py @@ -0,0 +1,78 @@ +from unittest.mock import MagicMock + +import pytest +from bluesky import plan_stubs as bps +from bluesky.run_engine import RunEngine +from dodal.beamlines import i03 +from dodal.devices.oav.oav_detector import OAV, OAVConfigParams +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.oav.pin_image_recognition import PinTipDetection +from ophyd.signal import Signal +from ophyd.status import Status + +from mx_bluesky.hyperion.device_setup_plans.setup_oav import ( + pre_centring_setup_oav, +) + +ZOOM_LEVELS_XML = "tests/test_data/test_jCameraManZoomLevels.xml" +OAV_CENTRING_JSON = "tests/test_data/test_OAVCentring.json" +DISPLAY_CONFIGURATION = "tests/test_data/test_display.configuration" + + +@pytest.fixture +def oav() -> OAV: + oav = i03.oav(fake_with_ophyd_sim=True) + oav.parameters = OAVConfigParams(ZOOM_LEVELS_XML, DISPLAY_CONFIGURATION) + + oav.proc.port_name.sim_put("proc") # type: ignore + oav.cam.port_name.sim_put("CAM") # type: ignore + + oav.zoom_controller.zrst.set("1.0x") + oav.zoom_controller.onst.set("2.0x") + oav.zoom_controller.twst.set("3.0x") + oav.zoom_controller.thst.set("5.0x") + oav.zoom_controller.frst.set("7.0x") + oav.zoom_controller.fvst.set("9.0x") + return oav + + +@pytest.fixture +def mock_parameters(): + return OAVParameters("loopCentring", OAV_CENTRING_JSON) + + +@pytest.mark.parametrize( + "zoom, expected_plugin", + [ + ("1.0", "proc"), + ("7.0", "CAM"), + ], +) +def test_when_set_up_oav_with_different_zoom_levels_then_flat_field_applied_correctly( + zoom, + 
expected_plugin, + mock_parameters: OAVParameters, + oav: OAV, + ophyd_pin_tip_detection: PinTipDetection, +): + mock_parameters.zoom = zoom + + RE = RunEngine() + RE(pre_centring_setup_oav(oav, mock_parameters, ophyd_pin_tip_detection)) + assert oav.grid_snapshot.input_plugin.get() == expected_plugin + + +def test_when_set_up_oav_then_only_waits_on_oav_to_finish( + mock_parameters: OAVParameters, oav: OAV, ophyd_pin_tip_detection: PinTipDetection +): + """This test will hang if pre_centring_setup_oav waits too generally as my_waiting_device + never finishes moving""" + my_waiting_device = Signal(name="") + my_waiting_device.set = MagicMock(return_value=Status()) + + def my_plan(): + yield from bps.abs_set(my_waiting_device, 10, wait=False) + yield from pre_centring_setup_oav(oav, mock_parameters, ophyd_pin_tip_detection) + + RE = RunEngine() + RE(my_plan()) diff --git a/tests/unit_tests/hyperion/device_setup_plans/test_setup_panda.py b/tests/unit_tests/hyperion/device_setup_plans/test_setup_panda.py new file mode 100644 index 000000000..20821726d --- /dev/null +++ b/tests/unit_tests/hyperion/device_setup_plans/test_setup_panda.py @@ -0,0 +1,246 @@ +from datetime import datetime +from typing import NamedTuple +from unittest.mock import MagicMock, patch + +import numpy as np +import pytest +from bluesky.plan_stubs import null +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator, assert_message_and_return_remaining +from dodal.common.types import UpdatingDirectoryProvider +from dodal.devices.fast_grid_scan import PandAGridScanParams +from ophyd_async.panda import SeqTrigger + +from mx_bluesky.hyperion.device_setup_plans.setup_panda import ( + MM_TO_ENCODER_COUNTS, + disarm_panda_for_gridscan, + set_panda_directory, + setup_panda_for_flyscan, +) + + +def get_smargon_speed(x_step_size_mm: float, time_between_x_steps_ms: float) -> float: + return x_step_size_mm / time_between_x_steps_ms + + +def run_simulating_setup_panda_functions( + plan: str, sim_run_engine: RunEngineSimulator, mock_load_device=MagicMock +): + num_of_sets = 0 + num_of_waits = 0 + mock_panda = MagicMock() + + def count_commands(msg): + nonlocal num_of_sets + nonlocal num_of_waits + if msg.command == "set": + num_of_sets += 1 + elif msg.command == "wait": + num_of_waits += 1 + + sim = RunEngineSimulator() + sim.add_handler(["set", "wait"], count_commands) + + if plan == "setup": + smargon_speed = get_smargon_speed(0.1, 1) + sim.simulate_plan( + setup_panda_for_flyscan( + mock_panda, + PandAGridScanParams(transmission_fraction=0.01), + 1, + 0.1, + 100.1, + smargon_speed, + ) + ) + elif plan == "disarm": + sim.simulate_plan(disarm_panda_for_gridscan(mock_panda)) + + return num_of_sets, num_of_waits + + +@patch("mx_bluesky.hyperion.device_setup_plans.setup_panda.load_device") +def test_setup_panda_performs_correct_plans(mock_load_device, sim_run_engine): + num_of_sets, num_of_waits = run_simulating_setup_panda_functions( + "setup", sim_run_engine, mock_load_device + ) + mock_load_device.assert_called_once() + assert num_of_sets == 8 + assert num_of_waits == 3 + + +class SeqRow(NamedTuple): + repeats: int + trigger: SeqTrigger + position: int + time1: int + outa1: int + time2: int + outa2: int + + +@pytest.mark.parametrize( + "x_steps, x_step_size, x_start, run_up_distance_mm, time_between_x_steps_ms, exposure_time_s", + [ + (10, 0.2, 0, 0.5, 10.001, 0.01), + (10, 0.5, -1, 0.05, 10.001, 0.01), + (1, 2, 1.2, 1, 100.001, 0.1), + (10, 2, -0.5, 3, 101, 0.1), + ], +) +def 
test_setup_panda_correctly_configures_table( + x_steps: int, + x_step_size: float, + x_start: float, + run_up_distance_mm: float, + time_between_x_steps_ms: float, + exposure_time_s: float, + sim_run_engine: RunEngineSimulator, + panda, +): + sample_velocity_mm_per_s = get_smargon_speed(x_step_size, time_between_x_steps_ms) + params = PandAGridScanParams( + x_steps=x_steps, + x_step_size=x_step_size, + x_start=x_start, + run_up_distance_mm=run_up_distance_mm, + transmission_fraction=0.01, + ) + + exposure_distance_mm = sample_velocity_mm_per_s * exposure_time_s + + msgs = sim_run_engine.simulate_plan( + setup_panda_for_flyscan( + panda, + params, + 0, + exposure_time_s, + time_between_x_steps_ms, + sample_velocity_mm_per_s, + ) + ) + + # ignore all loading operations related to loading saved panda state from yaml + msgs = [ + msg for msg in msgs if not msg.kwargs.get("group", "").startswith("load-phase") + ] + + assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "panda-pulse-1-width" + and msg.args[0] == exposure_time_s, + ) + + table_msg = [ + msg + for msg in msgs + if msg.command == "set" and msg.obj.name == "panda-seq-1-table" + ][0] + + table = table_msg.args[0] + + PULSE_WIDTH_US = 1 + SPACE_WIDTH_US = int(time_between_x_steps_ms * 1000 - PULSE_WIDTH_US) + expected_seq_rows: list[SeqRow] = [ + SeqRow(1, SeqTrigger.BITA_1, 0, 0, 0, 1, 0), + SeqRow( + x_steps, + SeqTrigger.POSA_GT, + int(params.x_start * MM_TO_ENCODER_COUNTS), + PULSE_WIDTH_US, + 1, + SPACE_WIDTH_US, + 0, + ), + ] + + exposure_distance_counts = exposure_distance_mm * MM_TO_ENCODER_COUNTS + expected_seq_rows.extend( + [ + SeqRow(1, SeqTrigger.BITA_1, 0, 0, 0, 1, 0), + SeqRow( + x_steps, + SeqTrigger.POSA_LT, + int( + (params.x_start + (params.x_steps - 1) * params.x_step_size) + * MM_TO_ENCODER_COUNTS + + exposure_distance_counts + ), + PULSE_WIDTH_US, + 1, + SPACE_WIDTH_US, + 0, + ), + ] + ) + + for key in SeqRow._fields: + np.testing.assert_array_equal( + table.get(key), + [getattr(row, key) for row in expected_seq_rows], + f"Sequence table for field {key} does not match", + ) + + +def test_wait_between_setting_table_and_arming_panda(RE: RunEngine): + bps_wait_done = False + + def handle_wait(*args, **kwargs): + nonlocal bps_wait_done + bps_wait_done = True + yield from null() + + def assert_set_table_has_been_waited_on(*args, **kwargs): + assert bps_wait_done + yield from null() + + with ( + patch( + "mx_bluesky.hyperion.device_setup_plans.setup_panda.arm_panda_for_gridscan", + MagicMock(side_effect=assert_set_table_has_been_waited_on), + ), + patch( + "mx_bluesky.hyperion.device_setup_plans.setup_panda.bps.wait", + MagicMock(side_effect=handle_wait), + ), + patch("mx_bluesky.hyperion.device_setup_plans.setup_panda.load_device"), + patch("mx_bluesky.hyperion.device_setup_plans.setup_panda.bps.abs_set"), + ): + RE( + setup_panda_for_flyscan( + MagicMock(), + PandAGridScanParams(transmission_fraction=0.01), + 1, + 0.1, + 101.1, + get_smargon_speed(0.1, 1), + ) + ) + + +# It also would be useful to have some system tests which check that (at least) +# all the blocks which were enabled on setup are also disabled on tidyup +def test_disarm_panda_disables_correct_blocks(sim_run_engine): + num_of_sets, num_of_waits = run_simulating_setup_panda_functions( + "disarm", sim_run_engine + ) + assert num_of_sets == 5 + assert num_of_waits == 1 + + +@patch("mx_bluesky.hyperion.device_setup_plans.setup_panda.get_directory_provider") 
+@patch("mx_bluesky.hyperion.device_setup_plans.setup_panda.datetime", spec=datetime) +def test_set_panda_directory( + mock_datetime, mock_get_directory_provider: MagicMock, tmp_path, RE +): + mock_directory_provider = MagicMock(spec=UpdatingDirectoryProvider) + mock_datetime.now = MagicMock( + return_value=datetime.fromisoformat("2024-08-11T15:59:23") + ) + mock_get_directory_provider.return_value = mock_directory_provider + + RE(set_panda_directory(tmp_path)) + mock_directory_provider.update.assert_called_with( + directory=tmp_path, suffix="_20240811155923" + ) diff --git a/tests/unit_tests/hyperion/device_setup_plans/test_utils.py b/tests/unit_tests/hyperion/device_setup_plans/test_utils.py new file mode 100644 index 000000000..29adbf73d --- /dev/null +++ b/tests/unit_tests/hyperion/device_setup_plans/test_utils.py @@ -0,0 +1,94 @@ +from unittest.mock import MagicMock + +import pytest +from bluesky import plan_stubs as bps +from bluesky.utils import FailedStatus +from dodal.beamlines import i03 +from ophyd.status import Status + +from mx_bluesky.hyperion.device_setup_plans.utils import ( + start_preparing_data_collection_then_do_plan, +) + + +@pytest.fixture() +def mock_eiger(): + eiger = i03.eiger(fake_with_ophyd_sim=True) + eiger.detector_params = MagicMock() + eiger.async_stage = MagicMock() + eiger.disarm_detector = MagicMock() + return eiger + + +class MyTestException(Exception): + pass + + +def test_given_plan_raises_when_exception_raised_then_eiger_disarmed_and_correct_exception_returned( + mock_eiger, RE +): + def my_plan(): + yield from bps.null() + raise MyTestException() + + eiger = mock_eiger + detector_motion = MagicMock() + + with pytest.raises(MyTestException): + RE( + start_preparing_data_collection_then_do_plan( + eiger, detector_motion, 100, my_plan() + ) + ) + + # Check detector was armed + eiger.async_stage.assert_called_once() + + eiger.disarm_detector.assert_called_once() + + +@pytest.fixture() +def null_plan(): + return bps.null() + + +def test_given_shutter_open_fails_then_eiger_disarmed_and_correct_exception_returned( + mock_eiger, null_plan, RE +): + detector_motion = MagicMock() + status = Status() + status.set_exception(MyTestException()) + detector_motion.z.set = MagicMock(return_value=status) + + with pytest.raises(FailedStatus) as e: + RE( + start_preparing_data_collection_then_do_plan( + mock_eiger, detector_motion, 100, null_plan + ) + ) + assert e.value.args[0] is status + + mock_eiger.async_stage.assert_called_once() + detector_motion.z.set.assert_called_once() + mock_eiger.disarm_detector.assert_called_once() + + +def test_given_detector_move_fails_then_eiger_disarmed_and_correct_exception_returned( + mock_eiger, null_plan, RE +): + detector_motion = MagicMock() + status = Status() + status.set_exception(MyTestException()) + detector_motion.shutter.set = MagicMock(return_value=status) + + with pytest.raises(FailedStatus) as e: + RE( + start_preparing_data_collection_then_do_plan( + mock_eiger, detector_motion, 100, null_plan + ) + ) + assert e.value.args[0] is status + + mock_eiger.async_stage.assert_called_once() + detector_motion.z.set.assert_called_once() + mock_eiger.disarm_detector.assert_called_once() diff --git a/tests/unit_tests/hyperion/device_setup_plans/test_xbpm_feedback.py b/tests/unit_tests/hyperion/device_setup_plans/test_xbpm_feedback.py new file mode 100644 index 000000000..ce8fe99fa --- /dev/null +++ b/tests/unit_tests/hyperion/device_setup_plans/test_xbpm_feedback.py @@ -0,0 +1,90 @@ +from unittest.mock import MagicMock + +import 
pytest +from bluesky import plan_stubs as bps +from bluesky.run_engine import RunEngine +from bluesky.utils import FailedStatus +from dodal.devices.xbpm_feedback import Pause +from ophyd.status import Status +from ophyd_async.core import set_mock_value + +from mx_bluesky.hyperion.device_setup_plans.xbpm_feedback import ( + transmission_and_xbpm_feedback_for_collection_decorator, +) + + +async def test_given_xpbm_checks_pass_when_plan_run_with_decorator_then_run_as_expected( + RE, + xbpm_feedback, + attenuator, +): + expected_transmission = 0.3 + + @transmission_and_xbpm_feedback_for_collection_decorator( + xbpm_feedback, attenuator, expected_transmission + ) + def my_collection_plan(): + read_transmission = yield from bps.rd(attenuator.actual_transmission) + assert read_transmission == expected_transmission + pause_feedback = yield from bps.rd(xbpm_feedback.pause_feedback) + assert pause_feedback == Pause.PAUSE + + set_mock_value(xbpm_feedback.pos_stable, True) # type: ignore + + RE = RunEngine() + RE(my_collection_plan()) + + assert await attenuator.actual_transmission.get_value() == 1.0 + assert await xbpm_feedback.pause_feedback.get_value() == Pause.RUN + + +async def test_given_xbpm_checks_fail_when_plan_run_with_decorator_then_plan_not_run( + RE, + xbpm_feedback, + attenuator, +): + mock = MagicMock() + + @transmission_and_xbpm_feedback_for_collection_decorator( + xbpm_feedback, attenuator, 0.1 + ) + def my_collection_plan(): + mock() + yield from bps.null() + + status = Status() + status.set_exception(Exception()) + xbpm_feedback.trigger = MagicMock(side_effect=lambda: status) + + RE = RunEngine() + with pytest.raises(FailedStatus): + RE(my_collection_plan()) + + mock.assert_not_called() + assert await attenuator.actual_transmission.get_value() == 1.0 + assert await xbpm_feedback.pause_feedback.get_value() == Pause.RUN + + +async def test_given_xpbm_checks_pass_and_plan_fails_when_plan_run_with_decorator_then_cleaned_up( + RE, + xbpm_feedback, + attenuator, +): + set_mock_value(xbpm_feedback.pos_stable, True) # type: ignore + + class MyException(Exception): + pass + + @transmission_and_xbpm_feedback_for_collection_decorator( + xbpm_feedback, attenuator, 0.1 + ) + def my_collection_plan(): + yield from bps.null() + raise MyException() + + RE = RunEngine() + with pytest.raises(MyException): + RE(my_collection_plan()) + + assert await attenuator.actual_transmission.get_value() == 1.0 + assert await xbpm_feedback.pause_feedback.get_value() == Pause.RUN diff --git a/tests/unit_tests/hyperion/device_setup_plans/test_zebra_setup.py b/tests/unit_tests/hyperion/device_setup_plans/test_zebra_setup.py new file mode 100644 index 000000000..d7056ce70 --- /dev/null +++ b/tests/unit_tests/hyperion/device_setup_plans/test_zebra_setup.py @@ -0,0 +1,81 @@ +from unittest.mock import MagicMock, call + +import pytest +from bluesky import plan_stubs as bps +from bluesky.run_engine import RunEngine +from dodal.beamlines import i03 +from dodal.devices.zebra import ( + IN3_TTL, + IN4_TTL, + OR1, + PC_PULSE, + TTL_DETECTOR, + TTL_SHUTTER, + I03Axes, + Zebra, +) + +from mx_bluesky.hyperion.device_setup_plans.setup_zebra import ( + bluesky_retry, + set_zebra_shutter_to_manual, + setup_zebra_for_gridscan, + setup_zebra_for_rotation, +) + + +@pytest.fixture +def zebra(): + RunEngine() + return i03.zebra(fake_with_ophyd_sim=True) + + +async def test_zebra_set_up_for_gridscan(RE, zebra: Zebra): + RE(setup_zebra_for_gridscan(zebra, wait=True)) + assert await zebra.output.out_pvs[TTL_DETECTOR].get_value() == IN3_TTL 
+ assert await zebra.output.out_pvs[TTL_SHUTTER].get_value() == IN4_TTL + + +async def test_zebra_set_up_for_rotation(RE, zebra: Zebra): + RE(setup_zebra_for_rotation(zebra, wait=True)) + assert await zebra.pc.gate_trigger.get_value() == I03Axes.OMEGA.value + assert await zebra.pc.gate_width.get_value() == pytest.approx(360, 0.01) + + +async def test_zebra_cleanup(RE, zebra: Zebra): + RE(set_zebra_shutter_to_manual(zebra, wait=True)) + assert await zebra.output.out_pvs[TTL_DETECTOR].get_value() == PC_PULSE + assert await zebra.output.out_pvs[TTL_SHUTTER].get_value() == OR1 + + +class MyException(Exception): + pass + + +def test_when_first_try_fails_then_bluesky_retry_tries_again(RE, done_status): + mock_device = MagicMock() + + @bluesky_retry + def my_plan(value): + yield from bps.abs_set(mock_device, value) + + mock_device.set.side_effect = [MyException(), done_status] + + RE(my_plan(10)) + + assert mock_device.set.mock_calls == [call(10), call(10)] + + +def test_when_all_tries_fail_then_bluesky_retry_throws_error(RE, done_status): + mock_device = MagicMock() + + @bluesky_retry + def my_plan(value): + yield from bps.abs_set(mock_device, value) + + exception_2 = MyException() + mock_device.set.side_effect = [MyException(), exception_2] + + with pytest.raises(MyException) as e: + RE(my_plan(10)) + + assert e.value == exception_2 diff --git a/tests/unit_tests/hyperion/experiment_plans/__init__.py b/tests/unit_tests/hyperion/experiment_plans/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/experiment_plans/conftest.py b/tests/unit_tests/hyperion/experiment_plans/conftest.py new file mode 100644 index 000000000..78075a63c --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/conftest.py @@ -0,0 +1,207 @@ +from collections.abc import Callable +from functools import partial +from unittest.mock import MagicMock, patch + +import pytest +from bluesky.utils import Msg +from dodal.devices.fast_grid_scan import ZebraFastGridScan +from dodal.devices.oav.oav_detector import OAVConfigParams +from dodal.devices.synchrotron import SynchrotronMode +from dodal.devices.zocalo import ZocaloResults, ZocaloTrigger +from event_model import Event +from ophyd_async.core import DeviceCollector +from ophyd_async.core.async_status import AsyncStatus + +from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import ( + create_gridscan_callbacks, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan + + +def make_event_doc(data, descriptor="abc123") -> Event: + return { + "time": 0, + "timestamps": {"a": 0}, + "seq_num": 0, + "uid": "not so random uid", + "descriptor": descriptor, + "data": data, + } + + +BASIC_PRE_SETUP_DOC = { + "undulator-current_gap": 0, + "synchrotron-synchrotron_mode": SynchrotronMode.USER, + "s4_slit_gaps_xgap": 0, + "s4_slit_gaps_ygap": 0, + "smargon-x": 10.0, + "smargon-y": 20.0, + "smargon-z": 30.0, +} + +BASIC_POST_SETUP_DOC = { + "aperture_scatterguard-selected_aperture": { + "name": "Robot_load", + "GDA_name": "ROBOT_LOAD", + "radius_microns": None, + "location": (15, 16, 2, 18, 19), + }, + "attenuator-actual_transmission": 0, + "flux_flux_reading": 10, + "dcm-energy_in_kev": 11.105, +} + + +@pytest.fixture +def 
sim_run_engine_for_rotation(sim_run_engine): + sim_run_engine.add_handler( + "read", + lambda msg: {"values": {"value": SynchrotronMode.USER}}, + "synchrotron-synchrotron_mode", + ) + sim_run_engine.add_handler( + "read", + lambda msg: {"values": {"value": -1}}, + "synchrotron-top_up_start_countdown", + ) + sim_run_engine.add_handler( + "read", lambda msg: {"values": {"value": -1}}, "smargon_omega" + ) + return sim_run_engine + + +def mock_zocalo_trigger(zocalo: ZocaloResults, result): + @AsyncStatus.wrap + async def mock_complete(results): + await zocalo._put_results(results, {"dcid": 0, "dcgid": 0}) + + zocalo.trigger = MagicMock(side_effect=partial(mock_complete, result)) + + +def run_generic_ispyb_handler_setup( + ispyb_handler: GridscanISPyBCallback, + params: ThreeDGridScan, +): + """This is useful when testing 'run_gridscan_and_move(...)' because this stuff + happens at the start of the outer plan.""" + + ispyb_handler.active = True + ispyb_handler.activity_gated_start( + { + "subplan_name": CONST.PLAN.GRIDSCAN_OUTER, + "hyperion_parameters": params.json(), + } # type: ignore + ) + ispyb_handler.activity_gated_descriptor( + {"uid": "123abc", "name": CONST.DESCRIPTORS.HARDWARE_READ_PRE} # type: ignore + ) + ispyb_handler.activity_gated_event( + make_event_doc( + BASIC_PRE_SETUP_DOC, + descriptor="123abc", + ) + ) + ispyb_handler.activity_gated_descriptor( + {"uid": "abc123", "name": CONST.DESCRIPTORS.HARDWARE_READ_DURING} # type: ignore + ) + ispyb_handler.activity_gated_event( + make_event_doc( + BASIC_POST_SETUP_DOC, + descriptor="abc123", + ) + ) + + +def modified_interactor_mock(assign_run_end: Callable | None = None): + mock = MagicMock(spec=ZocaloTrigger) + if assign_run_end: + mock.run_end = assign_run_end + return mock + + +def modified_store_grid_scan_mock(*args, dcids=(0, 0), dcgid=0, **kwargs): + mock = MagicMock(spec=StoreInIspyb) + mock.begin_deposition.return_value = IspybIds( + data_collection_ids=dcids, data_collection_group_id=dcgid + ) + mock.update_deposition.return_value = IspybIds( + data_collection_ids=dcids, data_collection_group_id=dcgid, grid_ids=(0, 0) + ) + return mock + + +@pytest.fixture +def mock_subscriptions(test_fgs_params): + with ( + patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + modified_interactor_mock, + ), + patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb.append_to_comment" + ), + patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb.begin_deposition", + new=MagicMock( + return_value=IspybIds( + data_collection_ids=(0, 0), data_collection_group_id=0 + ) + ), + ), + patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb.update_deposition", + new=MagicMock( + return_value=IspybIds( + data_collection_ids=(0, 0), + data_collection_group_id=0, + grid_ids=(0, 0), + ) + ), + ), + ): + nexus_callback, ispyb_callback = create_gridscan_callbacks() + ispyb_callback.ispyb = MagicMock(spec=StoreInIspyb) + + return (nexus_callback, ispyb_callback) + + +def fake_read(obj, initial_positions, _): + initial_positions[obj] = 0 + yield Msg("null", obj) + + +@pytest.fixture +def simple_beamline(detector_motion, oav, smargon, synchrotron, test_config_files, dcm): + magic_mock = MagicMock(autospec=True) + + with DeviceCollector(mock=True): + magic_mock.zocalo = ZocaloResults() + magic_mock.zebra_fast_grid_scan = ZebraFastGridScan("preifx", "fake_fgs") + + magic_mock.oav = oav + 
magic_mock.smargon = smargon + magic_mock.detector_motion = detector_motion + magic_mock.dcm = dcm + magic_mock.synchrotron = synchrotron + oav.zoom_controller.frst.set("7.5x") + oav.parameters = OAVConfigParams( + test_config_files["zoom_params_file"], test_config_files["display_config"] + ) + oav.parameters.update_on_zoom(7.5, 1024, 768) + return magic_mock + + +def assert_event(mock_call, expected): + actual = mock_call.args[0] + if "data" in actual: + actual = actual["data"] + for k, v in expected.items(): + assert actual[k] == v, f"Mismatch in key {k}, {actual} <=> {expected}" diff --git a/tests/unit_tests/hyperion/experiment_plans/test_experiment_registry.py b/tests/unit_tests/hyperion/experiment_plans/test_experiment_registry.py new file mode 100644 index 000000000..5747d0c0f --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_experiment_registry.py @@ -0,0 +1,31 @@ +from inspect import getfullargspec + +import mx_bluesky.hyperion.experiment_plans as plan_module +from mx_bluesky.hyperion.experiment_plans import __all__ as exposed_plans +from mx_bluesky.hyperion.experiment_plans.experiment_registry import ( + PLAN_REGISTRY, + do_nothing, +) +from mx_bluesky.hyperion.parameters.components import HyperionParameters + + +def test_experiment_registry_param_types(): + for plan in PLAN_REGISTRY.keys(): + assert issubclass(PLAN_REGISTRY[plan]["param_type"], HyperionParameters) + + +def test_exposed_plans_in_reg(): + for plan in exposed_plans: + assert plan in PLAN_REGISTRY.keys() + + +def test_param_types_in_registry_match_plan(): + for plan in exposed_plans: + plan_function = getattr(plan_module, plan) + plan_args = getfullargspec(plan_function) + param_arg_type = plan_args.annotations["parameters"] + assert PLAN_REGISTRY[plan]["param_type"].__name__ in param_arg_type + + +def test_do_nothing(): + do_nothing() diff --git a/tests/unit_tests/hyperion/experiment_plans/test_flyscan_xray_centre_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_flyscan_xray_centre_plan.py new file mode 100644 index 000000000..624f61e95 --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_flyscan_xray_centre_plan.py @@ -0,0 +1,1206 @@ +import random +import types +from pathlib import Path +from unittest.mock import DEFAULT, MagicMock, call, patch + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +import numpy as np +import pytest +from bluesky.run_engine import RunEngine, RunEngineResult +from bluesky.simulators import assert_message_and_return_remaining +from bluesky.utils import FailedStatus, Msg +from dodal.beamlines import i03 +from dodal.common.beamlines.beamline_utils import clear_device +from dodal.devices.aperturescatterguard import AperturePosition +from dodal.devices.detector.det_dim_constants import ( + EIGER_TYPE_EIGER2_X_16M, +) +from dodal.devices.fast_grid_scan import ZebraFastGridScan +from dodal.devices.synchrotron import SynchrotronMode +from dodal.devices.zocalo import ZocaloStartInfo +from ophyd.sim import NullStatus +from ophyd.status import Status +from ophyd_async.core import set_mock_value +from ophyd_async.panda._table import DatasetTable + +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, + read_hardware_pre_collection, +) +from mx_bluesky.hyperion.exceptions import WarningException +from mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan import ( + FlyScanXRayCentreComposite, + SmargonSpeedException, + _get_feature_controlled, + flyscan_xray_centre, + 
kickoff_and_complete_gridscan, + run_gridscan, + run_gridscan_and_move, + wait_for_gridscan_valid, +) +from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import ( + create_gridscan_callbacks, +) +from mx_bluesky.hyperion.external_interaction.callbacks.logging_callback import ( + VerbosePlanExecutionLoggingCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, + ispyb_activation_wrapper, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback import ( + GridscanNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback import ( + ZocaloCallback, +) +from mx_bluesky.hyperion.external_interaction.config_server import FeatureFlags +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, +) +from mx_bluesky.hyperion.log import ISPYB_LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan +from tests.conftest import ( + RunEngineSimulator, + create_dummy_scan_spec, +) + +from ....system_tests.hyperion.external_interaction.conftest import ( + TEST_RESULT_LARGE, + TEST_RESULT_MEDIUM, + TEST_RESULT_SMALL, +) +from ..external_interaction.callbacks.conftest import TestData +from .conftest import ( + assert_event, + mock_zocalo_trigger, + modified_interactor_mock, + modified_store_grid_scan_mock, + run_generic_ispyb_handler_setup, +) + +ReWithSubs = tuple[RunEngine, tuple[GridscanNexusFileCallback, GridscanISPyBCallback]] + + +@pytest.fixture +def fgs_composite_with_panda_pcap(fake_fgs_composite: FlyScanXRayCentreComposite): + capture_table = DatasetTable(name=np.array(["name"]), hdf5_type=[]) + set_mock_value(fake_fgs_composite.panda.data.datasets, capture_table) + + return fake_fgs_composite + + +@pytest.fixture +def fgs_params_use_panda(test_fgs_params: ThreeDGridScan, feature_flags: FeatureFlags): + feature_flags.use_panda_for_gridscan = True + test_fgs_params.features = feature_flags + return test_fgs_params + + +@pytest.fixture(params=[True, False], ids=["panda", "zebra"]) +def test_fgs_params_panda_zebra( + request: pytest.FixtureRequest, + feature_flags: FeatureFlags, + test_fgs_params: ThreeDGridScan, +): + if request.param: + feature_flags.use_panda_for_gridscan = request.param + test_fgs_params.features = feature_flags + return test_fgs_params + + +@pytest.fixture +def ispyb_plan(test_fgs_params: ThreeDGridScan): + @bpp.set_run_key_decorator(CONST.PLAN.GRIDSCAN_OUTER) + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.GRIDSCAN_OUTER, + "hyperion_parameters": test_fgs_params.json(), + } + ) + def standalone_read_hardware_for_ispyb( + und, syn, slits, robot, attn, fl, dcm, ap_sg, sm, det + ): + yield from read_hardware_pre_collection(und, syn, slits, robot, sm) + yield from read_hardware_during_collection(ap_sg, attn, fl, dcm, det) + + return standalone_read_hardware_for_ispyb + + +@pytest.fixture +def RE_with_subs( + RE: RunEngine, + mock_subscriptions: tuple[GridscanNexusFileCallback | GridscanISPyBCallback], +): + for cb in list(mock_subscriptions): + RE.subscribe(cb) + yield RE, mock_subscriptions + + +@pytest.fixture +def mock_ispyb(): + return MagicMock() + + +def _custom_msg(command_name: str): + return lambda *args, **kwargs: 
iter([Msg(command_name)]) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb", + modified_store_grid_scan_mock, +) +class TestFlyscanXrayCentrePlan: + td: TestData = TestData() + + def test_eiger2_x_16_detector_specified( + self, + test_fgs_params: ThreeDGridScan, + ): + assert ( + test_fgs_params.detector_params.detector_size_constants.det_type_string + == EIGER_TYPE_EIGER2_X_16M + ) + + def test_when_run_gridscan_called_then_generator_returned( + self, + ): + plan = run_gridscan(MagicMock(), MagicMock(), MagicMock()) + assert isinstance(plan, types.GeneratorType) + + def test_when_run_gridscan_called_ispyb_deposition_made_and_records_errors( + self, + RE: RunEngine, + fake_fgs_composite: FlyScanXRayCentreComposite, + test_fgs_params: ThreeDGridScan, + mock_ispyb: MagicMock, + ): + ispyb_callback = GridscanISPyBCallback() + RE.subscribe(ispyb_callback) + + error = None + with pytest.raises(FailedStatus) as exc: + with patch.object( + fake_fgs_composite.sample_motors.omega, "set" + ) as mock_set: + error = AssertionError("Test Exception") + mock_set.return_value = FailedStatus(error) + + RE( + ispyb_activation_wrapper( + flyscan_xray_centre(fake_fgs_composite, test_fgs_params), + test_fgs_params, + ) + ) + + assert exc.value.args[0] is error + ispyb_callback.ispyb.end_deposition.assert_called_once_with( # type: ignore + IspybIds(data_collection_group_id=0, data_collection_ids=(0, 0)), + "fail", + "Test Exception", + ) + + def test_read_hardware_for_ispyb_updates_from_ophyd_devices( + self, + fake_fgs_composite: FlyScanXRayCentreComposite, + test_fgs_params: ThreeDGridScan, + RE: RunEngine, + ispyb_plan, + ): + undulator_test_value = 1.234 + + set_mock_value(fake_fgs_composite.undulator.current_gap, undulator_test_value) + + synchrotron_test_value = SynchrotronMode.USER + set_mock_value( + fake_fgs_composite.synchrotron.synchrotron_mode, synchrotron_test_value + ) + + transmission_test_value = 0.01 + set_mock_value( + fake_fgs_composite.attenuator.actual_transmission, transmission_test_value + ) + + current_energy_kev_test_value = 12.05 + set_mock_value( + fake_fgs_composite.dcm.energy_in_kev.user_readback, + current_energy_kev_test_value, + ) + + xgap_test_value = 0.1234 + ygap_test_value = 0.2345 + ap_sg_test_value = { + "name": "Small", + "GDA_name": "SMALL_APERTURE", + "radius_microns": 20, + "location": (10, 11, 2, 13, 14), + } + fake_fgs_composite.s4_slit_gaps.xgap.user_readback.sim_put(xgap_test_value) # type: ignore + fake_fgs_composite.s4_slit_gaps.ygap.user_readback.sim_put(ygap_test_value) # type: ignore + flux_test_value = 10.0 + fake_fgs_composite.flux.flux_reading.sim_put(flux_test_value) # type: ignore + + RE( + bps.abs_set( + fake_fgs_composite.aperture_scatterguard, + AperturePosition.LARGE, + ) + ) + + test_ispyb_callback = PlanReactiveCallback(ISPYB_LOGGER) + test_ispyb_callback.active = True + + with patch.multiple( + test_ispyb_callback, + activity_gated_start=DEFAULT, + activity_gated_event=DEFAULT, + ): + RE.subscribe(test_ispyb_callback) + + RE( + ispyb_plan( + fake_fgs_composite.undulator, + fake_fgs_composite.synchrotron, + fake_fgs_composite.s4_slit_gaps, + fake_fgs_composite.robot, + fake_fgs_composite.attenuator, + fake_fgs_composite.flux, + fake_fgs_composite.dcm, + fake_fgs_composite.aperture_scatterguard, + fake_fgs_composite.smargon, + fake_fgs_composite.eiger, + ) + ) + # fmt: off + assert_event( + test_ispyb_callback.activity_gated_start.mock_calls[0], # pyright: ignore + { + "plan_name": 
"standalone_read_hardware_for_ispyb", + "subplan_name": "run_gridscan_move_and_tidy", + }, + ) + assert_event( + test_ispyb_callback.activity_gated_event.mock_calls[0], # pyright: ignore + { + "undulator-current_gap": undulator_test_value, + "synchrotron-synchrotron_mode": synchrotron_test_value.value, + "s4_slit_gaps_xgap": xgap_test_value, + "s4_slit_gaps_ygap": ygap_test_value, + }, + ) + assert_event( + test_ispyb_callback.activity_gated_event.mock_calls[1], # pyright: ignore + { + 'aperture_scatterguard-selected_aperture': ap_sg_test_value, + "attenuator-actual_transmission": transmission_test_value, + "flux_flux_reading": flux_test_value, + "dcm-energy_in_kev": current_energy_kev_test_value, + }, + ) + # fmt: on + + @patch( + "dodal.devices.aperturescatterguard.ApertureScatterguard._safe_move_within_datacollection_range", + return_value=NullStatus(), + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + def test_results_adjusted_and_passed_to_move_xyz( + self, + move_x_y_z: MagicMock, + run_gridscan: MagicMock, + move_aperture: MagicMock, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + test_fgs_params_panda_zebra: ThreeDGridScan, + RE_with_subs: ReWithSubs, + ): + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + ) + RE, _ = RE_with_subs + RE.subscribe(VerbosePlanExecutionLoggingCallback()) + + mock_zocalo_trigger(fgs_composite_with_panda_pcap.zocalo, TEST_RESULT_LARGE) + RE( + run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + ) + + mock_zocalo_trigger(fgs_composite_with_panda_pcap.zocalo, TEST_RESULT_MEDIUM) + RE( + run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + ) + + mock_zocalo_trigger(fgs_composite_with_panda_pcap.zocalo, TEST_RESULT_SMALL) + RE( + run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + ) + + aperture_scatterguard = fgs_composite_with_panda_pcap.aperture_scatterguard + large = aperture_scatterguard._loaded_positions[AperturePosition.LARGE] + medium = aperture_scatterguard._loaded_positions[AperturePosition.MEDIUM] + ap_call_large = call(large.location) + ap_call_medium = call(medium.location) + + move_aperture.assert_has_calls( + [ap_call_large, ap_call_large, ap_call_medium], any_order=True + ) + + mv_call_large = call( + fgs_composite_with_panda_pcap.sample_motors, + 0.05, + pytest.approx(0.15), + 0.25, + wait=True, + ) + mv_call_medium = call( + fgs_composite_with_panda_pcap.sample_motors, + 0.05, + pytest.approx(0.15), + 0.25, + wait=True, + ) + move_x_y_z.assert_has_calls( + [mv_call_large, mv_call_large, mv_call_medium], any_order=True + ) + + @patch("bluesky.plan_stubs.abs_set", autospec=True) + def test_results_passed_to_move_motors( + self, + bps_abs_set: MagicMock, + test_fgs_params: ThreeDGridScan, + fake_fgs_composite: FlyScanXRayCentreComposite, + RE: RunEngine, + ): + from mx_bluesky.hyperion.device_setup_plans.manipulate_sample import move_x_y_z + + motor_position = test_fgs_params.FGS_params.grid_position_to_motor_position( + np.array([1, 2, 3]) + ) + RE(move_x_y_z(fake_fgs_composite.sample_motors, *motor_position)) + bps_abs_set.assert_has_calls( + [ + call( + fake_fgs_composite.sample_motors.x, + 
motor_position[0], + group="move_x_y_z", + ), + call( + fake_fgs_composite.sample_motors.y, + motor_position[1], + group="move_x_y_z", + ), + call( + fake_fgs_composite.sample_motors.z, + motor_position[2], + group="move_x_y_z", + ), + ], + any_order=True, + ) + + @patch( + "dodal.devices.aperturescatterguard.ApertureScatterguard._safe_move_within_datacollection_range", + return_value=NullStatus(), + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + modified_interactor_mock, + ) + def test_individual_plans_triggered_once_and_only_once_in_composite_run( + self, + move_xyz: MagicMock, + run_gridscan: MagicMock, + move_aperture: MagicMock, + RE_with_subs: ReWithSubs, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + test_fgs_params_panda_zebra: ThreeDGridScan, + ): + RE, (_, ispyb_cb) = RE_with_subs + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, test_fgs_params_panda_zebra + ) + + def wrapped_gridscan_and_move(): + run_generic_ispyb_handler_setup(ispyb_cb, test_fgs_params_panda_zebra) + yield from run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + + RE( + ispyb_activation_wrapper( + wrapped_gridscan_and_move(), test_fgs_params_panda_zebra + ) + ) + run_gridscan.assert_called_once() + move_xyz.assert_called_once() + + @patch( + "dodal.devices.aperturescatterguard.ApertureScatterguard.set", + return_value=NullStatus(), + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + async def test_when_gridscan_finished_then_smargon_stub_offsets_are_set_and_dev_shm_disabled( + self, + move_xyz: MagicMock, + run_gridscan: MagicMock, + aperture_set: MagicMock, + RE_with_subs: ReWithSubs, + test_fgs_params_panda_zebra: ThreeDGridScan, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + ): + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, test_fgs_params_panda_zebra + ) + RE, (nexus_cb, ispyb_cb) = RE_with_subs + test_fgs_params_panda_zebra.features.set_stub_offsets = True + + fgs_composite_with_panda_pcap.eiger.odin.fan.dev_shm_enable.sim_put(1) # type: ignore + + def wrapped_gridscan_and_move(): + run_generic_ispyb_handler_setup(ispyb_cb, test_fgs_params_panda_zebra) + yield from run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + + RE( + ispyb_activation_wrapper( + wrapped_gridscan_and_move(), test_fgs_params_panda_zebra + ) + ) + assert ( + await fgs_composite_with_panda_pcap.smargon.stub_offsets.center_at_current_position.proc.get_value() + == 1 + ) + assert fgs_composite_with_panda_pcap.eiger.odin.fan.dev_shm_enable.get() == 0 + + @patch( + "dodal.devices.aperturescatterguard.ApertureScatterguard.set", + return_value=NullStatus(), + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + def test_when_gridscan_succeeds_ispyb_comment_appended_to( + self, + move_xyz: MagicMock, + run_gridscan: 
MagicMock, + aperture_set: MagicMock, + RE_with_subs: ReWithSubs, + test_fgs_params_panda_zebra: ThreeDGridScan, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + ): + RE, (nexus_cb, ispyb_cb) = RE_with_subs + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + ) + + def _wrapped_gridscan_and_move(): + run_generic_ispyb_handler_setup(ispyb_cb, test_fgs_params_panda_zebra) + yield from run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + + RE.subscribe(VerbosePlanExecutionLoggingCallback()) + + RE( + ispyb_activation_wrapper( + _wrapped_gridscan_and_move(), test_fgs_params_panda_zebra + ) + ) + app_to_comment: MagicMock = ispyb_cb.ispyb.append_to_comment # type:ignore + app_to_comment.assert_called() + append_aperture_call = app_to_comment.call_args_list[0].args[1] + append_zocalo_call = app_to_comment.call_args_list[-1].args[1] + assert "Aperture:" in append_aperture_call + assert "Crystal 1: Strength 999999" in append_zocalo_call + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.check_topup_and_wait_if_necessary", + ) + def test_waits_for_motion_program( + self, + check_topup_and_wait, + RE: RunEngine, + test_fgs_params: ThreeDGridScan, + fake_fgs_composite: FlyScanXRayCentreComposite, + done_status: Status, + ): + fake_fgs_composite.eiger.unstage = MagicMock(return_value=done_status) + clear_device("zebra_fast_grid_scan") + fgs = i03.zebra_fast_grid_scan(fake_with_ophyd_sim=True) + fgs.KICKOFF_TIMEOUT = 0.1 + fgs.complete = MagicMock(return_value=done_status) + set_mock_value(fgs.motion_program.running, 1) + with pytest.raises(FailedStatus): + RE( + kickoff_and_complete_gridscan( + fgs, + fake_fgs_composite.eiger, + fake_fgs_composite.synchrotron, + [ + test_fgs_params.scan_points_first_grid, + test_fgs_params.scan_points_second_grid, + ], + test_fgs_params.scan_indices, + ) + ) + fgs.KICKOFF_TIMEOUT = 1 + set_mock_value(fgs.motion_program.running, 0) + set_mock_value(fgs.status, 1) + res = RE( + kickoff_and_complete_gridscan( + fgs, + fake_fgs_composite.eiger, + fake_fgs_composite.synchrotron, + [ + test_fgs_params.scan_points_first_grid, + test_fgs_params.scan_points_second_grid, + ], + test_fgs_params.scan_indices, + ) + ) + assert isinstance(res, RunEngineResult) + assert res.exit_status == "success" + clear_device("zebra_fast_grid_scan") + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + def test_when_gridscan_fails_ispyb_comment_appended_to( + self, + move_xyz: MagicMock, + run_gridscan: MagicMock, + RE_with_subs: ReWithSubs, + test_fgs_params_panda_zebra: ThreeDGridScan, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + ): + RE, (nexus_cb, ispyb_cb) = RE_with_subs + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + ) + + def wrapped_gridscan_and_move(): + run_generic_ispyb_handler_setup(ispyb_cb, test_fgs_params_panda_zebra) + yield from run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + + mock_zocalo_trigger(fgs_composite_with_panda_pcap.zocalo, []) + RE( + ispyb_activation_wrapper( + wrapped_gridscan_and_move(), test_fgs_params_panda_zebra + ) + ) + app_to_comment: MagicMock = ispyb_cb.ispyb.append_to_comment # 
type:ignore + app_to_comment.assert_called() + append_aperture_call = app_to_comment.call_args_list[0].args[1] + append_zocalo_call = app_to_comment.call_args_list[-1].args[1] + assert "Aperture:" in append_aperture_call + assert "Zocalo found no crystals in this gridscan" in append_zocalo_call + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.complete", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.kickoff", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.mv", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + def test_GIVEN_no_results_from_zocalo_WHEN_communicator_wait_for_results_called_THEN_fallback_centre_used( + self, + move_xyz: MagicMock, + mock_mv: MagicMock, + mock_kickoff: MagicMock, + mock_complete: MagicMock, + RE_with_subs: ReWithSubs, + test_fgs_params_panda_zebra: ThreeDGridScan, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + done_status: Status, + ): + RE, (nexus_cb, ispyb_cb) = RE_with_subs + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + ) + fgs_composite_with_panda_pcap.eiger.unstage = MagicMock( + return_value=done_status + ) + initial_x_y_z = np.array( + [ + random.uniform(-0.5, 0.5), + random.uniform(-0.5, 0.5), + random.uniform(-0.5, 0.5), + ] + ) + set_mock_value( + fgs_composite_with_panda_pcap.smargon.x.user_readback, initial_x_y_z[0] + ) + set_mock_value( + fgs_composite_with_panda_pcap.smargon.y.user_readback, initial_x_y_z[1] + ) + set_mock_value( + fgs_composite_with_panda_pcap.smargon.z.user_readback, initial_x_y_z[2] + ) + + def wrapped_gridscan_and_move(): + run_generic_ispyb_handler_setup(ispyb_cb, test_fgs_params_panda_zebra) + yield from run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + + mock_zocalo_trigger(fgs_composite_with_panda_pcap.zocalo, []) + RE( + ispyb_activation_wrapper( + wrapped_gridscan_and_move(), test_fgs_params_panda_zebra + ) + ) + assert np.all(move_xyz.call_args[0][1:] == initial_x_y_z) + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + async def test_given_gridscan_fails_to_centre_then_stub_offsets_not_set( + self, + move_xyz: MagicMock, + run_gridscan: MagicMock, + RE: RunEngine, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + test_fgs_params_panda_zebra: ThreeDGridScan, + ): + class MoveException(Exception): + pass + + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + ) + mock_zocalo_trigger(fgs_composite_with_panda_pcap.zocalo, []) + move_xyz.side_effect = MoveException() + + with pytest.raises(MoveException): + RE( + run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + ) + assert ( + await fgs_composite_with_panda_pcap.smargon.stub_offsets.center_at_current_position.proc.get_value() + == 0 + ) + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.move_x_y_z", + autospec=True, + ) + async def 
test_given_setting_stub_offsets_disabled_then_stub_offsets_not_set( + self, + move_xyz: MagicMock, + run_gridscan: MagicMock, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + test_fgs_params_panda_zebra: ThreeDGridScan, + RE_with_subs: ReWithSubs, + done_status: Status, + ): + RE, (nexus_cb, ispyb_cb) = RE_with_subs + fgs_composite_with_panda_pcap.aperture_scatterguard.set = MagicMock( + return_value=done_status + ) + feature_controlled = _get_feature_controlled( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + ) + test_fgs_params_panda_zebra.features.set_stub_offsets = False + + def wrapped_gridscan_and_move(): + run_generic_ispyb_handler_setup(ispyb_cb, test_fgs_params_panda_zebra) + yield from run_gridscan_and_move( + fgs_composite_with_panda_pcap, + test_fgs_params_panda_zebra, + feature_controlled, + ) + + RE.subscribe(VerbosePlanExecutionLoggingCallback()) + + RE( + ispyb_activation_wrapper( + wrapped_gridscan_and_move(), test_fgs_params_panda_zebra + ) + ) + assert ( + await fgs_composite_with_panda_pcap.smargon.stub_offsets.center_at_current_position.proc.get_value() + == 0 + ) + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.sleep", + autospec=True, + ) + def test_GIVEN_scan_already_valid_THEN_wait_for_GRIDSCAN_returns_immediately( + self, patch_sleep: MagicMock, RE: RunEngine + ): + test_fgs: ZebraFastGridScan = i03.zebra_fast_grid_scan(fake_with_ophyd_sim=True) + + set_mock_value(test_fgs.position_counter, 0) + set_mock_value(test_fgs.scan_invalid, False) + + RE(wait_for_gridscan_valid(test_fgs)) + + patch_sleep.assert_not_called() + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.sleep", + autospec=True, + ) + def test_GIVEN_scan_not_valid_THEN_wait_for_GRIDSCAN_raises_and_sleeps_called( + self, patch_sleep: MagicMock, RE: RunEngine + ): + test_fgs: ZebraFastGridScan = i03.zebra_fast_grid_scan(fake_with_ophyd_sim=True) + + set_mock_value(test_fgs.scan_invalid, True) + set_mock_value(test_fgs.position_counter, 0) + + with pytest.raises(WarningException): + RE(wait_for_gridscan_valid(test_fgs)) + + patch_sleep.assert_called() + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.abs_set", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.kickoff", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.complete", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.mv", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.wait_for_gridscan_valid", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.external_interaction.nexus.write_nexus.NexusWriter", + autospec=True, + spec_set=True, + ) + def test_when_grid_scan_ran_then_eiger_disarmed_before_zocalo_end( + self, + nexuswriter, + wait_for_valid, + mock_mv, + mock_complete, + mock_kickoff, + mock_abs_set, + fake_fgs_composite: FlyScanXRayCentreComposite, + test_fgs_params: ThreeDGridScan, + RE_with_subs: ReWithSubs, + ): + test_fgs_params.x_steps = 8 + test_fgs_params.y_steps = 10 + test_fgs_params.z_steps = 12 + RE, (nexus_cb, ispyb_cb) = RE_with_subs + # Put both mocks in a parent to easily capture order + mock_parent = MagicMock() + fake_fgs_composite.eiger.disarm_detector = mock_parent.disarm + + fake_fgs_composite.eiger.filewriters_finished = NullStatus() # type: ignore + fake_fgs_composite.eiger.odin.check_odin_state = 
MagicMock(return_value=True) + fake_fgs_composite.eiger.odin.file_writer.num_captured.sim_put(1200) # type: ignore + fake_fgs_composite.eiger.stage = MagicMock( + return_value=Status(None, None, 0, True, True) + ) + set_mock_value(fake_fgs_composite.xbpm_feedback.pos_stable, True) + + with ( + patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback.NexusWriter.create_nexus_file", + autospec=True, + ), + patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + lambda _: modified_interactor_mock(mock_parent.run_end), + ), + ): + [RE.subscribe(cb) for cb in (nexus_cb, ispyb_cb)] + RE( + ispyb_activation_wrapper( + flyscan_xray_centre(fake_fgs_composite, test_fgs_params), + test_fgs_params, + ) + ) + + mock_parent.assert_has_calls([call.disarm(), call.run_end(0), call.run_end(0)]) + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.set_panda_directory", + side_effect=_custom_msg("set_panda_directory"), + ) + @patch( + "mx_bluesky.hyperion.device_setup_plans.setup_panda.arm_panda_for_gridscan", + new=MagicMock(side_effect=_custom_msg("arm_panda")), + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.disarm_panda_for_gridscan", + new=MagicMock(side_effect=_custom_msg("disarm_panda")), + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.run_gridscan", + new=MagicMock(side_effect=_custom_msg("do_gridscan")), + ) + def test_flyscan_xray_centre_sets_directory_stages_arms_disarms_unstages_the_panda( + self, + mock_set_panda_directory: MagicMock, + done_status: Status, + fgs_composite_with_panda_pcap: FlyScanXRayCentreComposite, + fgs_params_use_panda: ThreeDGridScan, + sim_run_engine: RunEngineSimulator, + ): + sim_run_engine.add_handler("unstage", lambda _: done_status) + sim_run_engine.add_read_handler_for( + fgs_composite_with_panda_pcap.smargon.x.max_velocity, 10 + ) + + msgs = sim_run_engine.simulate_plan( + flyscan_xray_centre(fgs_composite_with_panda_pcap, fgs_params_use_panda) + ) + + mock_set_panda_directory.assert_called_with( + Path("/tmp/dls/i03/data/2024/cm31105-4/xraycentring/123456") + ) + + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "set_panda_directory" + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "stage" and msg.obj.name == "panda" + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "arm_panda" + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "do_gridscan" + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "disarm_panda" + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "unstage" and msg.obj.name == "panda" + ) + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.wait", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.complete", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.kickoff", + autospec=True, + ) + def test_fgs_arms_eiger_without_grid_detect( + self, + mock_kickoff, + mock_complete, + mock_wait, + fake_fgs_composite: FlyScanXRayCentreComposite, + test_fgs_params_panda_zebra: ThreeDGridScan, + RE: RunEngine, + done_status: Status, + ): + feature_controlled = _get_feature_controlled( + fake_fgs_composite, test_fgs_params_panda_zebra + ) + fake_fgs_composite.eiger.unstage = 
MagicMock(return_value=done_status) + RE( + run_gridscan( + fake_fgs_composite, test_fgs_params_panda_zebra, feature_controlled + ) + ) + fake_fgs_composite.eiger.stage.assert_called_once() # type: ignore + fake_fgs_composite.eiger.unstage.assert_called_once() + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.kickoff", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.wait", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.complete", + autospec=True, + ) + def test_when_grid_scan_fails_then_detector_disarmed_and_correct_exception_returned( + self, + mock_complete, + mock_wait, + mock_kickoff, + fake_fgs_composite: FlyScanXRayCentreComposite, + test_fgs_params_panda_zebra: ThreeDGridScan, + RE: RunEngine, + ): + class CompleteException(Exception): + pass + + feature_controlled = _get_feature_controlled( + fake_fgs_composite, + test_fgs_params_panda_zebra, + ) + mock_complete.side_effect = CompleteException() + + fake_fgs_composite.eiger.stage = MagicMock( + return_value=Status(None, None, 0, True, True) + ) + + fake_fgs_composite.eiger.odin.check_odin_state = MagicMock() + + fake_fgs_composite.eiger.disarm_detector = MagicMock() + fake_fgs_composite.eiger.disable_roi_mode = MagicMock() + + # Without the complete finishing we will not get all the images + fake_fgs_composite.eiger.ALL_FRAMES_TIMEOUT = 0.1 # type: ignore + + # Want to get the underlying completion error, not the one raised from unstage + with pytest.raises(CompleteException): + RE( + run_gridscan( + fake_fgs_composite, test_fgs_params_panda_zebra, feature_controlled + ) + ) + + fake_fgs_composite.eiger.disable_roi_mode.assert_called() + fake_fgs_composite.eiger.disarm_detector.assert_called() + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.kickoff", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.bps.complete", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + autospec=True, + ) + def test_kickoff_and_complete_gridscan_triggers_zocalo( + self, + mock_zocalo_trigger_class: MagicMock, + mock_complete: MagicMock, + mock_kickoff: MagicMock, + RE: RunEngine, + fake_fgs_composite: FlyScanXRayCentreComposite, + ): + id_1, id_2 = 100, 200 + + _, ispyb_cb = create_gridscan_callbacks() + ispyb_cb.active = True + ispyb_cb.ispyb = MagicMock() + ispyb_cb.params = MagicMock() + ispyb_cb.ispyb_ids.data_collection_ids = (id_1, id_2) + assert isinstance(zocalo_cb := ispyb_cb.emit_cb, ZocaloCallback) + zocalo_env = "dev_env" + + mock_zocalo_trigger_class.return_value = (mock_zocalo_trigger := MagicMock()) + + zocalo_cb.start( + {CONST.TRIGGER.ZOCALO: CONST.PLAN.DO_FGS, "zocalo_environment": zocalo_env} # type: ignore + ) + assert zocalo_cb.triggering_plan == CONST.PLAN.DO_FGS + + fake_fgs_composite.eiger.unstage = MagicMock() + fake_fgs_composite.eiger.odin.file_writer.id.sim_put("test/filename") # type: ignore + + x_steps, y_steps, z_steps = 10, 20, 30 + + RE.subscribe(ispyb_cb) + RE( + kickoff_and_complete_gridscan( + fake_fgs_composite.zebra_fast_grid_scan, + fake_fgs_composite.eiger, + fake_fgs_composite.synchrotron, + scan_points=create_dummy_scan_spec(x_steps, y_steps, z_steps), + scan_start_indices=[0, x_steps * y_steps], + ) + ) + + mock_zocalo_trigger_class.assert_called_once_with(zocalo_env) + + expected_start_infos = [ + ZocaloStartInfo(id_1, 
"test/filename", 0, x_steps * y_steps, 0), + ZocaloStartInfo( + id_2, "test/filename", x_steps * y_steps, x_steps * z_steps, 1 + ), + ] + + expected_start_calls = [ + call(expected_start_infos[0]), + call(expected_start_infos[1]), + ] + + assert mock_zocalo_trigger.run_start.call_count == 2 + assert mock_zocalo_trigger.run_start.mock_calls == expected_start_calls + + assert mock_zocalo_trigger.run_end.call_count == 2 + assert mock_zocalo_trigger.run_end.mock_calls == [call(id_1), call(id_2)] + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.check_topup_and_wait_if_necessary", + new=MagicMock(side_effect=lambda *_, **__: iter([Msg("check_topup")])), + ) + def test_read_hardware_during_collection_occurs_after_eiger_arm( + self, + fake_fgs_composite: FlyScanXRayCentreComposite, + test_fgs_params_panda_zebra: ThreeDGridScan, + sim_run_engine: RunEngineSimulator, + ): + feature_controlled = _get_feature_controlled( + fake_fgs_composite, test_fgs_params_panda_zebra + ) + sim_run_engine.add_handler( + "read", + lambda msg: {"values": {"value": SynchrotronMode.USER}}, + "synchrotron-synchrotron_mode", + ) + msgs = sim_run_engine.simulate_plan( + run_gridscan( + fake_fgs_composite, test_fgs_params_panda_zebra, feature_controlled + ) + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "stage" and msg.obj.name == "eiger" + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "kickoff" + and msg.obj == feature_controlled.fgs_motors, + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "create" + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "read" and msg.obj.name == "eiger_bit_depth", + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "save" + ) + + @patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.kickoff_and_complete_gridscan", + ) + def test_if_smargon_speed_over_limit_then_log_error( + self, + mock_kickoff_and_complete: MagicMock, + test_fgs_params_panda_zebra: ThreeDGridScan, + fake_fgs_composite: FlyScanXRayCentreComposite, + RE: RunEngine, + ): + test_fgs_params_panda_zebra.x_step_size_um = 10 + test_fgs_params_panda_zebra.detector_params.exposure_time = 0.01 + + feature_controlled = _get_feature_controlled( + fake_fgs_composite, + test_fgs_params_panda_zebra, + ) + + # this exception should only be raised if we're using the panda + try: + RE( + run_gridscan_and_move( + fake_fgs_composite, test_fgs_params_panda_zebra, feature_controlled + ) + ) + except SmargonSpeedException: + assert test_fgs_params_panda_zebra.features.use_panda_for_gridscan + else: + assert not test_fgs_params_panda_zebra.features.use_panda_for_gridscan diff --git a/tests/unit_tests/hyperion/experiment_plans/test_grid_detect_then_xray_centre_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_grid_detect_then_xray_centre_plan.py new file mode 100644 index 000000000..ea088621e --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_grid_detect_then_xray_centre_plan.py @@ -0,0 +1,330 @@ +from collections.abc import Generator +from typing import cast +from unittest.mock import ANY, MagicMock, patch + +import bluesky.plan_stubs as bps +import pytest +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator, assert_message_and_return_remaining +from bluesky.utils import Msg +from dodal.beamlines import i03 +from dodal.devices.aperturescatterguard import 
AperturePosition +from dodal.devices.backlight import BacklightPosition +from dodal.devices.eiger import EigerDetector +from dodal.devices.oav.oav_detector import OAVConfigParams +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.smargon import Smargon + +from mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan import ( + GridDetectThenXRayCentreComposite, + OavGridDetectionComposite, + detect_grid_and_do_gridscan, + grid_detect_then_xray_centre, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + ispyb_activation_wrapper, +) +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ( + GridScanWithEdgeDetect, + ThreeDGridScan, +) + +from ..conftest import OavGridSnapshotTestEvents + + +def _fake_grid_detection( + devices: OavGridDetectionComposite, + parameters: OAVParameters, + snapshot_template: str, + snapshot_dir: str, + grid_width_microns: float = 0, + box_size_um: float = 0.0, +): + oav = i03.oav(fake_with_ophyd_sim=True) + oav.grid_snapshot.box_width.put(635.00986) + # first grid detection: x * y + oav.grid_snapshot.num_boxes_x.put(10) + oav.grid_snapshot.num_boxes_y.put(4) + yield from bps.create(CONST.DESCRIPTORS.OAV_GRID_SNAPSHOT_TRIGGERED) + yield from bps.read(oav.grid_snapshot) + yield from bps.read(devices.smargon) + yield from bps.save() + + # second grid detection: x * z, so num_boxes_y refers to smargon z + oav.grid_snapshot.num_boxes_x.put(10) + oav.grid_snapshot.num_boxes_y.put(1) + yield from bps.create(CONST.DESCRIPTORS.OAV_GRID_SNAPSHOT_TRIGGERED) + yield from bps.read(oav.grid_snapshot) + yield from bps.read(devices.smargon) + yield from bps.save() + + +@pytest.fixture +def grid_detect_devices( + aperture_scatterguard: i03.ApertureScatterguard, + backlight: i03.Backlight, + detector_motion: i03.DetectorMotion, + smargon: Smargon, +): + return GridDetectThenXRayCentreComposite( + aperture_scatterguard=aperture_scatterguard, + attenuator=MagicMock(), + backlight=backlight, + detector_motion=detector_motion, + eiger=MagicMock(), + zebra_fast_grid_scan=MagicMock(), + flux=MagicMock(), + oav=MagicMock(), + pin_tip_detection=MagicMock(), + smargon=smargon, + synchrotron=MagicMock(), + s4_slit_gaps=MagicMock(), + undulator=MagicMock(), + xbpm_feedback=MagicMock(), + zebra=MagicMock(), + zocalo=MagicMock(), + panda=MagicMock(), + panda_fast_grid_scan=MagicMock(), + dcm=MagicMock(), + robot=MagicMock(), + ) + + +def test_full_grid_scan( + test_fgs_params: ThreeDGridScan, test_config_files: dict[str, str] +): + devices = MagicMock() + plan = grid_detect_then_xray_centre( + devices, + cast(GridScanWithEdgeDetect, test_fgs_params), + test_config_files["oav_config_json"], + ) + assert isinstance(plan, Generator) + + +@pytest.fixture +def grid_detect_devices_with_oav_config_params( + grid_detect_devices: GridDetectThenXRayCentreComposite, + test_config_files: dict[str, str], +) -> GridDetectThenXRayCentreComposite: + grid_detect_devices.oav.parameters = OAVConfigParams( + test_config_files["zoom_params_file"], test_config_files["display_config"] + ) + grid_detect_devices.oav.parameters.micronsPerXPixel = 0.806 + grid_detect_devices.oav.parameters.micronsPerYPixel = 0.806 + grid_detect_devices.oav.parameters.beam_centre_i = 549 + grid_detect_devices.oav.parameters.beam_centre_j = 347 + return grid_detect_devices + + +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.grid_detection_plan", + 
autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.flyscan_xray_centre", + autospec=True, +) +async def test_detect_grid_and_do_gridscan( + mock_flyscan_xray_centre_plan: MagicMock, + mock_grid_detection_plan: MagicMock, + grid_detect_devices_with_oav_config_params: GridDetectThenXRayCentreComposite, + RE: RunEngine, + smargon: Smargon, + test_full_grid_scan_params: GridScanWithEdgeDetect, + test_config_files: dict, +): + mock_grid_detection_plan.side_effect = _fake_grid_detection + + with patch.object( + grid_detect_devices_with_oav_config_params.aperture_scatterguard, + "set", + MagicMock(), + ) as mock_aperture_scatterguard: + RE( + ispyb_activation_wrapper( + detect_grid_and_do_gridscan( + grid_detect_devices_with_oav_config_params, + parameters=test_full_grid_scan_params, + oav_params=OAVParameters( + "xrayCentring", test_config_files["oav_config_json"] + ), + ), + test_full_grid_scan_params, + ) + ) + # Verify we called the grid detection plan + mock_grid_detection_plan.assert_called_once() + + # Check backlight was moved OUT + assert ( + await grid_detect_devices_with_oav_config_params.backlight.position.get_value() + == BacklightPosition.OUT + ) + + # Check aperture was changed to SMALL + mock_aperture_scatterguard.assert_called_once_with(AperturePosition.SMALL) + + # Check we called out to underlying fast grid scan plan + mock_flyscan_xray_centre_plan.assert_called_once_with(ANY, ANY) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.grid_detection_plan", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.flyscan_xray_centre", + autospec=True, +) +def test_when_full_grid_scan_run_then_parameters_sent_to_fgs_as_expected( + mock_flyscan_xray_centre_plan: MagicMock, + mock_grid_detection_plan: MagicMock, + eiger: EigerDetector, + grid_detect_devices_with_oav_config_params: GridDetectThenXRayCentreComposite, + RE: RunEngine, + test_full_grid_scan_params: GridScanWithEdgeDetect, + test_config_files: dict, + smargon: Smargon, +): + oav_params = OAVParameters("xrayCentring", test_config_files["oav_config_json"]) + + mock_grid_detection_plan.side_effect = _fake_grid_detection + + with ( + patch.object(eiger.do_arm, "set", MagicMock()), + patch.object( + grid_detect_devices_with_oav_config_params.aperture_scatterguard, + "set", + MagicMock(), + ), + ): + RE( + ispyb_activation_wrapper( + detect_grid_and_do_gridscan( + grid_detect_devices_with_oav_config_params, + parameters=test_full_grid_scan_params, + oav_params=oav_params, + ), + test_full_grid_scan_params, + ) + ) + + params: ThreeDGridScan = mock_flyscan_xray_centre_plan.call_args[0][1] + + assert params.detector_params.num_triggers == 50 + + assert params.FGS_params.x_axis.full_steps == 10 + assert params.FGS_params.y_axis.end == pytest.approx(1.511, 0.001) + + # Parameters can be serialized + params.json() + + +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.grid_detection_plan", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.flyscan_xray_centre", + autospec=True, +) +def test_detect_grid_and_do_gridscan_does_not_activate_ispyb_callback( + mock_flyscan_xray_centre, + mock_grid_detection_plan, + grid_detect_devices_with_oav_config_params: GridDetectThenXRayCentreComposite, + sim_run_engine: RunEngineSimulator, + test_full_grid_scan_params: GridScanWithEdgeDetect, + test_config_files: dict[str, str], +): + 
mock_grid_detection_plan.return_value = iter([Msg("save_oav_grids")]) + sim_run_engine.add_handler_for_callback_subscribes() + sim_run_engine.add_callback_handler_for_multiple( + "save_oav_grids", + [ + [ + ( + "descriptor", + OavGridSnapshotTestEvents.test_descriptor_document_oav_snapshot, # type: ignore + ), + ( + "event", + OavGridSnapshotTestEvents.test_event_document_oav_snapshot_xy, # type: ignore + ), + ( + "event", + OavGridSnapshotTestEvents.test_event_document_oav_snapshot_xz, # type: ignore + ), + ] + ], + ) + + msgs = sim_run_engine.simulate_plan( + detect_grid_and_do_gridscan( + grid_detect_devices_with_oav_config_params, + test_full_grid_scan_params, + OAVParameters("xrayCentring", test_config_files["oav_config_json"]), + ) + ) + + activations = [ + msg + for msg in msgs + if msg.command == "open_run" + and "GridscanISPyBCallback" in msg.kwargs["activate_callbacks"] + ] + assert not activations + + +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.grid_detection_plan", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.flyscan_xray_centre", + autospec=True, +) +def test_grid_detect_then_xray_centre_activates_ispyb_callback( + mock_flyscan_xray_centre, + mock_grid_detection_plan, + sim_run_engine: RunEngineSimulator, + grid_detect_devices_with_oav_config_params: GridDetectThenXRayCentreComposite, + test_full_grid_scan_params: GridScanWithEdgeDetect, + test_config_files: dict[str, str], +): + mock_grid_detection_plan.return_value = iter([Msg("save_oav_grids")]) + + sim_run_engine.add_handler_for_callback_subscribes() + sim_run_engine.add_callback_handler_for_multiple( + "save_oav_grids", + [ + [ + ( + "descriptor", + OavGridSnapshotTestEvents.test_descriptor_document_oav_snapshot, # type: ignore + ), + ( + "event", + OavGridSnapshotTestEvents.test_event_document_oav_snapshot_xy, # type: ignore + ), + ( + "event", + OavGridSnapshotTestEvents.test_event_document_oav_snapshot_xz, # type: ignore + ), + ] + ], + ) + msgs = sim_run_engine.simulate_plan( + grid_detect_then_xray_centre( + grid_detect_devices_with_oav_config_params, + test_full_grid_scan_params, + test_config_files["oav_config_json"], + ) + ) + + assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "open_run" + and "GridscanISPyBCallback" in msg.kwargs["activate_callbacks"], + ) diff --git a/tests/unit_tests/hyperion/experiment_plans/test_grid_detection_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_grid_detection_plan.py new file mode 100644 index 000000000..caf49d310 --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_grid_detection_plan.py @@ -0,0 +1,480 @@ +from typing import Any, Literal +from unittest.mock import DEFAULT, AsyncMock, MagicMock, patch + +import bluesky.preprocessors as bpp +import numpy as np +import pytest +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator +from bluesky.utils import Msg +from dodal.beamlines import i03 +from dodal.devices.backlight import Backlight +from dodal.devices.oav.oav_detector import OAVConfigParams +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.oav.pin_image_recognition import PinTipDetection +from dodal.devices.oav.pin_image_recognition.utils import NONE_VALUE, SampleLocation +from dodal.devices.smargon import Smargon +from numpy._typing._array_like import NDArray +from ophyd_async.core import set_mock_value + +from mx_bluesky.hyperion.exceptions import 
WarningException +from mx_bluesky.hyperion.experiment_plans.oav_grid_detection_plan import ( + OavGridDetectionComposite, + get_min_and_max_y_of_pin, + grid_detection_plan, +) +from mx_bluesky.hyperion.external_interaction.callbacks.grid_detection_callback import ( + GridDetectionCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, + ispyb_activation_wrapper, +) +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan + +from .conftest import assert_event + + +@pytest.fixture +def fake_devices( + RE: RunEngine, + smargon: Smargon, + backlight: Backlight, + test_config_files: dict[str, str], +): + params = OAVConfigParams( + test_config_files["zoom_params_file"], test_config_files["display_config"] + ) + oav = i03.oav(wait_for_connection=False, fake_with_ophyd_sim=True, params=params) + oav.parameters.update_on_zoom = MagicMock() + oav.parameters.load_microns_per_pixel = MagicMock() + oav.parameters.micronsPerXPixel = 1.58 + oav.parameters.micronsPerYPixel = 1.58 + oav.parameters.beam_centre_i = 517 + oav.parameters.beam_centre_j = 350 + + oav.wait_for_connection() + + pin_tip_detection = i03.pin_tip_detection(fake_with_ophyd_sim=True) + pin_tip_detection._get_tip_and_edge_data = AsyncMock( + return_value=SampleLocation( + 8, + 5, + np.array([0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 4, 4, 3, 3, 2, 2, 3, 3, 4, 4]), + np.array([0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 6, 6, 7, 7, 8, 8, 7, 7, 6, 6]), + ) + ) + + oav.zoom_controller.zrst.set("1.0x") + oav.zoom_controller.onst.set("2.0x") + oav.zoom_controller.twst.set("3.0x") + oav.zoom_controller.thst.set("5.0x") + oav.zoom_controller.frst.set("7.0x") + oav.zoom_controller.fvst.set("9.0x") + + with ( + patch("dodal.devices.areadetector.plugins.MJPG.requests"), + patch("dodal.devices.areadetector.plugins.MJPG.Image") as mock_image_class, + ): + mock_image = MagicMock() + mock_image_class.open.return_value.__enter__.return_value = mock_image + + composite = OavGridDetectionComposite( + backlight=backlight, + oav=oav, + smargon=smargon, + pin_tip_detection=pin_tip_detection, + ) + + yield composite, mock_image + + +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch("bluesky.plan_stubs.sleep", new=MagicMock()) +def test_grid_detection_plan_runs_and_triggers_snapshots( + RE: RunEngine, + test_config_files: dict[str, str], + fake_devices: tuple[OavGridDetectionComposite, MagicMock], +): + params = OAVParameters("loopCentring", test_config_files["oav_config_json"]) + composite, image = fake_devices + + @bpp.run_decorator() + def decorated(): + yield from grid_detection_plan( + composite, + parameters=params, + snapshot_dir="tmp", + snapshot_template="test_{angle}", + grid_width_microns=161.2, + ) + + RE(decorated()) + assert image.save.call_count == 6 + + +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch("bluesky.plan_stubs.sleep", new=MagicMock()) +async def test_grid_detection_plan_gives_warning_error_if_tip_not_found( + RE: RunEngine, + test_config_files: dict[str, str], + fake_devices: tuple[OavGridDetectionComposite, MagicMock], +): + composite, _ = fake_devices + + set_mock_value(composite.pin_tip_detection.validity_timeout, 0.01) + composite.pin_tip_detection._get_tip_and_edge_data = AsyncMock( + return_value=SampleLocation( + *PinTipDetection.INVALID_POSITION, + np.array([]), + np.array([]), + ) + ) + + params = OAVParameters("loopCentring", 
test_config_files["oav_config_json"]) + + with pytest.raises(WarningException) as excinfo: + RE( + grid_detection_plan( + composite, + parameters=params, + snapshot_dir="tmp", + snapshot_template="test_{angle}", + grid_width_microns=161.2, + ) + ) + assert "No pin found" in excinfo.value.args[0] + + +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch("bluesky.plan_stubs.sleep", new=MagicMock()) +def test_given_when_grid_detect_then_start_position_as_expected( + fake_devices: tuple[OavGridDetectionComposite, MagicMock], + RE: RunEngine, + test_config_files: dict[str, str], +): + params = OAVParameters("loopCentring", test_config_files["oav_config_json"]) + box_size_um = 0.2 + composite, _ = fake_devices + composite.oav.parameters.micronsPerXPixel = 0.1 + composite.oav.parameters.micronsPerYPixel = 0.1 + composite.oav.parameters.beam_centre_i = 4 + composite.oav.parameters.beam_centre_j = 4 + box_size_y_pixels = box_size_um / composite.oav.parameters.micronsPerYPixel + + grid_param_cb = GridDetectionCallback(composite.oav.parameters) + RE.subscribe(grid_param_cb) + + @bpp.run_decorator() + def decorated(): + yield from grid_detection_plan( + composite, + parameters=params, + snapshot_dir="tmp", + snapshot_template="test_{angle}", + grid_width_microns=161.2, + box_size_um=0.2, + ) + + RE(decorated()) + + gridscan_params = grid_param_cb.get_grid_parameters() + + assert gridscan_params["x_start_um"] == pytest.approx(0.0005) + assert gridscan_params["y_start_um"] == pytest.approx( + -0.0001 + - ( + (box_size_y_pixels / 2) * composite.oav.parameters.micronsPerYPixel * 1e-3 + ) # microns to mm + ) + assert gridscan_params["z_start_um"] == pytest.approx(-0.0001) + + +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch("bluesky.plan_stubs.sleep", new=MagicMock()) +@patch( + "mx_bluesky.hyperion.experiment_plans.oav_grid_detection_plan.pre_centring_setup_oav", + new=MagicMock(), +) +def test_when_grid_detection_plan_run_twice_then_values_do_not_persist_in_callback( + fake_devices: tuple[OavGridDetectionComposite, MagicMock], + RE: RunEngine, + test_config_files: dict[str, str], +): + params = OAVParameters("loopCentring", test_config_files["oav_config_json"]) + + composite, _ = fake_devices + + for _ in range(2): + + @bpp.run_decorator() + def decorated(): + yield from grid_detection_plan( + composite, + parameters=params, + snapshot_dir="tmp", + snapshot_template="test_{angle}", + grid_width_microns=161.2, + ) + + RE(decorated()) + + +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch("bluesky.plan_stubs.sleep", new=MagicMock()) +def test_when_grid_detection_plan_run_then_ispyb_callback_gets_correct_values( + fake_devices: tuple[OavGridDetectionComposite, MagicMock], + RE: RunEngine, + test_config_files: dict[str, str], + test_fgs_params: ThreeDGridScan, +): + params = OAVParameters("loopCentring", test_config_files["oav_config_json"]) + composite, _ = fake_devices + composite.oav.parameters.micronsPerYPixel = 1.25 + composite.oav.parameters.micronsPerXPixel = 1.25 + cb = GridscanISPyBCallback() + RE.subscribe(cb) + + def decorated(): + yield from grid_detection_plan( + composite, + parameters=params, + snapshot_dir="tmp", + snapshot_template="test_{angle}", + grid_width_microns=161.2, + ) + + with patch.multiple(cb, activity_gated_start=DEFAULT, activity_gated_event=DEFAULT): + RE(ispyb_activation_wrapper(decorated(), 
test_fgs_params)) + + assert_event( + cb.activity_gated_start.mock_calls[0], # pyright:ignore + {"activate_callbacks": ["GridscanISPyBCallback"]}, + ) + assert_event( + cb.activity_gated_event.mock_calls[0], # pyright: ignore + { + "oav_grid_snapshot_top_left_x": 8, + "oav_grid_snapshot_top_left_y": -6, + "oav_grid_snapshot_num_boxes_x": 8, + "oav_grid_snapshot_num_boxes_y": 2, + "oav_grid_snapshot_box_width": 16, + "oav_grid_snapshot_microns_per_pixel_x": 1.25, + "oav_grid_snapshot_microns_per_pixel_y": 1.25, + "oav_grid_snapshot_last_path_full_overlay": "tmp/test_0_grid_overlay.png", + "oav_grid_snapshot_last_path_outer": "tmp/test_0_outer_overlay.png", + "oav_grid_snapshot_last_saved_path": "tmp/test_0.png", + }, + ) + assert_event( + cb.activity_gated_event.mock_calls[1], # pyright:ignore + { + "oav_grid_snapshot_top_left_x": 8, + "oav_grid_snapshot_top_left_y": 2, + "oav_grid_snapshot_num_boxes_x": 8, + "oav_grid_snapshot_num_boxes_y": 1, + "oav_grid_snapshot_box_width": 16, + "oav_grid_snapshot_microns_per_pixel_x": 1.25, + "oav_grid_snapshot_microns_per_pixel_y": 1.25, + "oav_grid_snapshot_last_path_full_overlay": "tmp/test_90_grid_overlay.png", + "oav_grid_snapshot_last_path_outer": "tmp/test_90_outer_overlay.png", + "oav_grid_snapshot_last_saved_path": "tmp/test_90.png", + }, + ) + + +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch("bluesky.plan_stubs.sleep", new=MagicMock()) +def test_when_grid_detection_plan_run_then_grid_detection_callback_gets_correct_values( + fake_devices: tuple[OavGridDetectionComposite, MagicMock], + RE: RunEngine, + test_config_files: dict[str, str], + test_fgs_params: ThreeDGridScan, +): + params = OAVParameters("loopCentring", test_config_files["oav_config_json"]) + composite, _ = fake_devices + box_size_um = 20 + cb = GridDetectionCallback(composite.oav.parameters) + RE.subscribe(cb) + + def decorated(): + yield from grid_detection_plan( + composite, + parameters=params, + snapshot_dir="tmp", + snapshot_template="test_{angle}", + grid_width_microns=161.2, + ) + + RE(ispyb_activation_wrapper(decorated(), test_fgs_params)) + + my_grid_params = cb.get_grid_parameters() + + assert my_grid_params["x_start_um"] == pytest.approx(-0.7942199999999999) + assert my_grid_params["y_start_um"] == pytest.approx( + -0.53984 - (box_size_um * 1e-3 / 2) + ) + assert my_grid_params["y2_start_um"] == pytest.approx( + -0.53984 - (box_size_um * 1e-3 / 2) + ) + assert my_grid_params["z_start_um"] == pytest.approx(-0.53984) + assert my_grid_params["z2_start_um"] == pytest.approx(-0.53984) + assert my_grid_params["x_step_size_um"] == pytest.approx(0.02) + assert my_grid_params["y_step_size_um"] == pytest.approx(0.02) + assert my_grid_params["z_step_size_um"] == pytest.approx(0.02) + assert my_grid_params["x_steps"] == pytest.approx(9) + assert my_grid_params["y_steps"] == pytest.approx(2) + assert my_grid_params["z_steps"] == pytest.approx(1) + assert cb.x_step_size_mm == cb.y_step_size_mm == cb.z_step_size_mm == 0.02 + + +@pytest.mark.parametrize( + "odd", + [(True), (False)], +) +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch("bluesky.plan_stubs.sleep", new=MagicMock()) +@patch("mx_bluesky.hyperion.experiment_plans.oav_grid_detection_plan.LOGGER") +def test_when_detected_grid_has_odd_y_steps_then_add_a_y_step_and_shift_grid( + fake_logger: MagicMock, + fake_devices: tuple[OavGridDetectionComposite, MagicMock], + sim_run_engine: RunEngineSimulator, + 
test_config_files: dict[str, str], + odd: bool, +): + composite, _ = fake_devices + params = OAVParameters("loopCentring", test_config_files["oav_config_json"]) + grid_width_microns = 161.2 + box_size_um = 20 + assert composite.oav.parameters.micronsPerYPixel is not None + box_size_y_pixels = box_size_um / composite.oav.parameters.micronsPerYPixel + initial_min_y = 1 + + abs_sets: dict[str, list] = { + "grid_snapshot.top_left_y": [], + "grid_snapshot.num_boxes_y": [], + } + + def handle_read(msg: Msg): + if msg.obj.name == "pin_tip_detection-triggered_tip": + return {"values": {"value": (8, 5)}} + if msg.obj.name == "pin_tip_detection-triggered_top_edge": + top_edge = [0] * 20 + top_edge[19] = initial_min_y + return {"values": {"value": top_edge}} + elif msg.obj.name == "pin_tip_detection-triggered_bottom_edge": + bottom_edge = [0] * 20 + bottom_edge[19] = ( + 10 if odd else 25 + ) # Ensure y steps comes out as even or odd + return {"values": {"value": bottom_edge}} + else: + pass + + def record_set(msg: Msg): + if hasattr(msg.obj, "dotted_name"): + if msg.obj.dotted_name in abs_sets.keys(): + abs_sets[msg.obj.dotted_name].append(msg.args[0]) + + sim_run_engine.add_handler("set", record_set) + sim_run_engine.add_handler("read", handle_read) + sim_run_engine.simulate_plan( + grid_detection_plan( + composite, + parameters=params, + snapshot_dir="tmp", + snapshot_template="test_{angle}", + grid_width_microns=grid_width_microns, + ) + ) + + expected_min_y = initial_min_y - box_size_y_pixels / 2 if odd else initial_min_y + expected_y_steps = 2 + + if odd: + fake_logger.debug.assert_called_once_with( + f"Forcing number of rows in first grid to be even: Adding an extra row onto bottom of first grid and shifting grid upwards by {box_size_y_pixels/2}" + ) + else: + fake_logger.debug.assert_not_called() + + assert abs_sets["grid_snapshot.top_left_y"][0] == expected_min_y + assert abs_sets["grid_snapshot.num_boxes_y"][0] == expected_y_steps + + +@pytest.mark.parametrize( + "top, bottom, expected_min, expected_max", + [ + (np.array([1, 2, 5]), np.array([8, 9, 40]), 1, 40), + (np.array([9, 6, 10]), np.array([152, 985, 72]), 6, 985), + (np.array([5, 1]), np.array([999, 1056, 896, 10]), 1, 1056), + ], +) +def test_given_array_with_valid_top_and_bottom_then_min_and_max_as_expected( + top: NDArray[Any], + bottom: NDArray[Any], + expected_min: Literal[1] | Literal[6], + expected_max: Literal[40] | Literal[985] | Literal[1056], +): + min_y, max_y = get_min_and_max_y_of_pin(top, bottom, 100) + assert min_y == expected_min + assert max_y == expected_max + + +@pytest.mark.parametrize( + "top, bottom, expected_min, expected_max", + [ + (np.array([1, 2, NONE_VALUE]), np.array([8, 9, 40]), 1, 40), + (np.array([6, NONE_VALUE, 10]), np.array([152, 985, NONE_VALUE]), 6, 985), + (np.array([1, 5]), np.array([999, 1056, NONE_VALUE, 10]), 1, 1056), + ], +) +def test_given_array_with_some_invalid_top_and_bottom_sections_then_min_and_max_as_expected( + top: NDArray[Any], + bottom: NDArray[Any], + expected_min: Literal[1] | Literal[6], + expected_max: Literal[40] | Literal[985] | Literal[1056], +): + min_y, max_y = get_min_and_max_y_of_pin(top, bottom, 100) + assert min_y == expected_min + assert max_y == expected_max + + +@pytest.mark.parametrize( + "top, bottom, expected_min, expected_max", + [ + (np.array([NONE_VALUE, 0, NONE_VALUE]), np.array([100, NONE_VALUE]), 0, 100), + (np.array([NONE_VALUE, NONE_VALUE]), np.array([100, NONE_VALUE]), 0, 100), + (np.array([0, NONE_VALUE]), np.array([NONE_VALUE]), 0, 100), + ], +) 
+def test_given_array_with_all_invalid_top_and_bottom_sections_then_min_and_max_is_full_image( + top: NDArray[Any], + bottom: NDArray[Any], + expected_min: Literal[0], + expected_max: Literal[100], +): + min_y, max_y = get_min_and_max_y_of_pin(top, bottom, 100) + assert min_y == expected_min + assert max_y == expected_max diff --git a/tests/unit_tests/hyperion/experiment_plans/test_multi_rotation_scan_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_multi_rotation_scan_plan.py new file mode 100644 index 000000000..e8ca6417b --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_multi_rotation_scan_plan.py @@ -0,0 +1,448 @@ +from __future__ import annotations + +import json +import shutil +from collections.abc import Callable, Sequence +from itertools import takewhile +from math import ceil +from typing import Any +from unittest.mock import MagicMock, patch + +import h5py +import numpy as np +import pytest +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator, assert_message_and_return_remaining +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.synchrotron import SynchrotronMode +from ophyd_async.core import set_mock_value + +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationScanComposite, + calculate_motion_profile, + multi_rotation_scan, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback import ( + RotationISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback import ( + RotationNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import StoreInIspyb +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import MultiRotationScan, RotationScan + +from ....conftest import ( + DocumentCapturer, + extract_metafile, + fake_read, + raw_params_from_file, +) +from ..external_interaction.conftest import * # noqa # for fixtures +from ..external_interaction.conftest import mx_acquisition_from_conn + +TEST_OFFSET = 1 +TEST_SHUTTER_OPENING_DEGREES = 2.5 + + +def test_multi_rotation_scan_params(): + raw_params = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_multi_rotation_scan_parameters.json" + ) + params = MultiRotationScan(**raw_params) + omega_starts = [s["omega_start_deg"] for s in raw_params["rotation_scans"]] + for i, scan in enumerate(params.single_rotation_scans): + assert scan.omega_start_deg == omega_starts[i] + assert scan.nexus_vds_start_img == params.scan_indices[i] + assert params.scan_indices + + +async def test_multi_rotation_plan_runs_multiple_plans_in_one_arm( + fake_create_rotation_devices: RotationScanComposite, + test_multi_rotation_params: MultiRotationScan, + sim_run_engine_for_rotation: RunEngineSimulator, + oav_parameters_for_rotation: OAVParameters, +): + smargon = fake_create_rotation_devices.smargon + omega = smargon.omega + set_mock_value( + fake_create_rotation_devices.synchrotron.synchrotron_mode, SynchrotronMode.USER + ) + msgs = sim_run_engine_for_rotation.simulate_plan( + multi_rotation_scan( + fake_create_rotation_devices, + test_multi_rotation_params, + oav_parameters_for_rotation, + ) + ) + + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "stage" and msg.obj.name == "eiger" + )[1:] + + msgs_within_arming = list( + takewhile( + lambda msg: msg.command != "unstage" + and (not msg.obj or msg.obj.name != "eiger"), + msgs, + ) + ) + + 
def _assert_set_seq_and_return_remaining(remaining, name_value_pairs): + for name, value in name_value_pairs: + try: + remaining = assert_message_and_return_remaining( + remaining, + lambda msg: msg.command == "set" + and msg.obj.name == name + and msg.args == (value,), + ) + except Exception as e: + raise Exception(f"Failed to find {name} being set to {value}") from e + return remaining + + for scan in test_multi_rotation_params.single_rotation_scans: + motion_values = calculate_motion_profile( + scan, + (await omega.acceleration_time.get_value()), + (await omega.max_velocity.get_value()), + ) + # moving to the start position + msgs_within_arming = _assert_set_seq_and_return_remaining( + msgs_within_arming, + [ + ("smargon-x", scan.x_start_um / 1000), # type: ignore + ("smargon-y", scan.y_start_um / 1000), # type: ignore + ("smargon-z", scan.z_start_um / 1000), # type: ignore + ("smargon-phi", scan.phi_start_deg), + ("smargon-chi", scan.chi_start_deg), + ], + ) + # arming the zebra + msgs_within_arming = assert_message_and_return_remaining( + msgs_within_arming, + lambda msg: msg.command == "set" and msg.obj.name == "zebra-pc-arm", + ) + # the final rel_set of omega to trigger the scan + assert_message_and_return_remaining( + msgs_within_arming, + lambda msg: msg.command == "set" + and msg.obj.name == "smargon-omega" + and msg.args + == ( + (scan.scan_width_deg + motion_values.shutter_opening_deg) + * motion_values.direction.multiplier, + ), + ) + + +def _run_multi_rotation_plan( + RE: RunEngine, + params: MultiRotationScan, + devices: RotationScanComposite, + callbacks: Sequence[Callable[[str, dict[str, Any]], Any]], + oav_params: OAVParameters, +): + for cb in callbacks: + RE.subscribe(cb) + with patch("bluesky.preprocessors.__read_and_stash_a_motor", fake_read): + RE(multi_rotation_scan(devices, params, oav_params)) + + +def test_full_multi_rotation_plan_docs_emitted( + RE: RunEngine, + test_multi_rotation_params: MultiRotationScan, + fake_create_rotation_devices: RotationScanComposite, + oav_parameters_for_rotation: OAVParameters, +): + callback_sim = DocumentCapturer() + _run_multi_rotation_plan( + RE, + test_multi_rotation_params, + fake_create_rotation_devices, + [callback_sim], + oav_parameters_for_rotation, + ) + docs = callback_sim.docs_received + + assert ( + outer_plan_start_doc := DocumentCapturer.assert_doc( + docs, "start", matches_fields=({"plan_name": "multi_rotation_scan"}) + ) + ) + outer_uid = outer_plan_start_doc[1]["uid"] + inner_run_docs = DocumentCapturer.get_docs_until( + docs, + "stop", + matches_fields=({"run_start": outer_uid, "exit_status": "success"}), + )[1:-1] + + for scan in test_multi_rotation_params.single_rotation_scans: + inner_run_docs = DocumentCapturer.get_docs_from( + inner_run_docs, + "start", + matches_fields={"subplan_name": "rotation_scan_with_cleanup"}, + ) + scan_docs = DocumentCapturer.get_docs_until( + inner_run_docs, + "stop", + matches_fields={"run_start": inner_run_docs[0][1]["uid"]}, + ) + assert DocumentCapturer.is_match( + scan_docs[0], + "start", + has_fields=["trigger_zocalo_on", "hyperion_parameters"], + ) + params = RotationScan(**json.loads(scan_docs[0][1]["hyperion_parameters"])) + assert params == scan + assert len(events := DocumentCapturer.get_matches(scan_docs, "event")) == 3 + DocumentCapturer.assert_events_and_data_in_order( + events, + [ + ["eiger_odin_file_writer_id"], + ["undulator-current_gap", "synchrotron-synchrotron_mode", "smargon-x"], + [ + "attenuator-actual_transmission", + "flux_flux_reading", + 
"dcm-energy_in_kev", + "eiger_bit_depth", + ], + ], + ) + inner_run_docs = DocumentCapturer.get_docs_from( + inner_run_docs, + "stop", + matches_fields={"run_start": inner_run_docs[0][1]["uid"]}, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback.NexusWriter" +) +def test_full_multi_rotation_plan_nexus_writer_called_correctly( + mock_nexus_writer: MagicMock, + RE: RunEngine, + test_multi_rotation_params: MultiRotationScan, + fake_create_rotation_devices: RotationScanComposite, + oav_parameters_for_rotation: OAVParameters, +): + callback = RotationNexusFileCallback() + _run_multi_rotation_plan( + RE, + test_multi_rotation_params, + fake_create_rotation_devices, + [callback], + oav_parameters_for_rotation, + ) + nexus_writer_calls = mock_nexus_writer.call_args_list + first_run_number = test_multi_rotation_params.detector_params.run_number + for call, rotation_params in zip( + nexus_writer_calls, + test_multi_rotation_params.single_rotation_scans, + strict=False, + ): + assert call.args[0] == rotation_params + assert call.kwargs == { + "omega_start_deg": rotation_params.omega_start_deg, + "chi_start_deg": rotation_params.chi_start_deg, + "phi_start_deg": rotation_params.phi_start_deg, + "vds_start_index": rotation_params.nexus_vds_start_img, + "full_num_of_images": test_multi_rotation_params.num_images, + "meta_data_run_number": first_run_number, + "rotation_direction": rotation_params.rotation_direction, + } + + +def test_full_multi_rotation_plan_nexus_files_written_correctly( + RE: RunEngine, + test_multi_rotation_params: MultiRotationScan, + fake_create_rotation_devices: RotationScanComposite, + oav_parameters_for_rotation: OAVParameters, + tmpdir, +): + multi_params = test_multi_rotation_params + prefix = "multi_rotation_test" + test_data_dir = "tests/test_data/nexus_files/" + meta_file = f"{test_data_dir}rotation/ins_8_5_meta.h5.gz" + fake_datafile = f"{test_data_dir}fake_data.h5" + multi_params.file_name = prefix + multi_params.storage_directory = f"{tmpdir}" + meta_data_run_number = multi_params.detector_params.run_number + + data_filename_prefix = f"{prefix}_{meta_data_run_number}_" + meta_filename = f"{prefix}_{meta_data_run_number}_meta.h5" + + callback = RotationNexusFileCallback() + _run_multi_rotation_plan( + RE, + multi_params, + fake_create_rotation_devices, + [callback], + oav_parameters_for_rotation, + ) + + def _expected_dset_number(image_number: int): + # image numbers 0-999 are in dset 1, etc. 
+ return int(ceil((image_number + 1) / 1000)) + + num_datasets = range( + 1, _expected_dset_number(multi_params.num_images - 1) + ) # the index of the last image is num_images - 1 + + for i in num_datasets: + shutil.copy( + fake_datafile, + f"{tmpdir}/{data_filename_prefix}{i:06d}.h5", + ) + extract_metafile( + meta_file, + f"{tmpdir}/{meta_filename}", + ) + for i, scan in enumerate(multi_params.single_rotation_scans): + with h5py.File(f"{tmpdir}/{prefix}_{i+1}.nxs", "r") as written_nexus_file: + # check links go to the right file: + detector_specific = written_nexus_file[ + "entry/instrument/detector/detectorSpecific" + ] + for field in ["software_version"]: + link = detector_specific.get(field, getlink=True) # type: ignore + assert link.filename == meta_filename # type: ignore + data_group = written_nexus_file["entry/data"] + for field in [f"data_{n:06d}" for n in num_datasets]: + link = data_group.get(field, getlink=True) # type: ignore + assert link.filename.startswith(data_filename_prefix) # type: ignore + + # check dataset starts and stops are correct: + assert isinstance(dataset := data_group["data"], h5py.Dataset) # type: ignore + assert dataset.is_virtual + assert dataset[scan.num_images - 1, 0, 0] == 0 + with pytest.raises(IndexError): + assert dataset[scan.num_images, 0, 0] == 0 + dataset_sources = dataset.virtual_sources() + expected_dset_start = _expected_dset_number(multi_params.scan_indices[i]) + expected_dset_end = _expected_dset_number(multi_params.scan_indices[i + 1]) + dset_start_name = dataset_sources[0].dset_name + dset_end_name = dataset_sources[-1].dset_name + assert dset_start_name.endswith(f"data_{expected_dset_start:06d}") + assert dset_end_name.endswith(f"data_{expected_dset_end:06d}") + + # check scan values are correct for each file: + assert isinstance( + chi := written_nexus_file["/entry/sample/sample_chi/chi"], h5py.Dataset + ) + assert chi[:] == scan.chi_start_deg + assert isinstance( + phi := written_nexus_file["/entry/sample/sample_phi/phi"], h5py.Dataset + ) + assert phi[:] == scan.phi_start_deg + assert isinstance( + omega := written_nexus_file["/entry/sample/sample_omega/omega"], + h5py.Dataset, + ) + omega = omega[:] + assert isinstance( + omega_end := written_nexus_file["/entry/sample/sample_omega/omega_end"], + h5py.Dataset, + ) + omega_end = omega_end[:] + assert len(omega) == scan.num_images + expected_omega_starts = np.linspace( + scan.omega_start_deg, + scan.omega_start_deg + + ((scan.num_images - 1) * multi_params.rotation_increment_deg), + scan.num_images, + ) + assert np.allclose(omega, expected_omega_starts) + expected_omega_ends = ( + expected_omega_starts + multi_params.rotation_increment_deg + ) + assert np.allclose(omega_end, expected_omega_ends) + assert isinstance( + omega_transform := written_nexus_file[ + "/entry/sample/transformations/omega" + ], + h5py.Dataset, + ) + assert isinstance(omega_vec := omega_transform.attrs["vector"], np.ndarray) + assert tuple(omega_vec) == (1.0 * scan.rotation_direction.multiplier, 0, 0) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback.StoreInIspyb" +) +def test_full_multi_rotation_plan_ispyb_called_correctly( + mock_ispyb_store: MagicMock, + RE: RunEngine, + test_multi_rotation_params: MultiRotationScan, + fake_create_rotation_devices: RotationScanComposite, + oav_parameters_for_rotation: OAVParameters, +): + callback = RotationISPyBCallback() + mock_ispyb_store.return_value = MagicMock(spec=StoreInIspyb) + _run_multi_rotation_plan( + RE, + 
test_multi_rotation_params, + fake_create_rotation_devices, + [callback], + oav_parameters_for_rotation, + ) + ispyb_calls = mock_ispyb_store.call_args_list + for instantiation_call, ispyb_store_calls, _ in zip( + ispyb_calls, + [ # there should be 4 calls to the IspybStore per run + mock_ispyb_store.return_value.method_calls[i * 4 : (i + 1) * 4] + for i in range(len(test_multi_rotation_params.rotation_scans)) + ], + test_multi_rotation_params.single_rotation_scans, + strict=False, + ): + assert instantiation_call.args[0] == CONST.SIM.ISPYB_CONFIG + assert ispyb_store_calls[0][0] == "begin_deposition" + assert ispyb_store_calls[1][0] == "update_deposition" + assert ispyb_store_calls[2][0] == "update_deposition" + assert ispyb_store_calls[3][0] == "end_deposition" + + +def test_full_multi_rotation_plan_ispyb_interaction_end_to_end( + mock_ispyb_conn_multiscan, + RE: RunEngine, + test_multi_rotation_params: MultiRotationScan, + fake_create_rotation_devices: RotationScanComposite, + oav_parameters_for_rotation: OAVParameters, +): + number_of_scans = len(test_multi_rotation_params.rotation_scans) + callback = RotationISPyBCallback() + _run_multi_rotation_plan( + RE, + test_multi_rotation_params, + fake_create_rotation_devices, + [callback], + oav_parameters_for_rotation, + ) + mx = mx_acquisition_from_conn(mock_ispyb_conn_multiscan) + assert mx.get_data_collection_group_params.call_count == number_of_scans + assert mx.get_data_collection_params.call_count == number_of_scans * 4 + for upsert_calls, rotation_params in zip( + [ # there should be 4 datacollection upserts per scan + mx.upsert_data_collection.call_args_list[i * 4 : (i + 1) * 4] + for i in range(len(test_multi_rotation_params.rotation_scans)) + ], + test_multi_rotation_params.single_rotation_scans, + strict=False, + ): + first_upsert_data = upsert_calls[0].args[0] + assert ( + first_upsert_data[12] - first_upsert_data[11] + == rotation_params.scan_width_deg + ) + assert first_upsert_data[15] == rotation_params.num_images + second_upsert_data = upsert_calls[1].args[0] + assert second_upsert_data[29].startswith("Sample position") + position_string = f"{rotation_params.x_start_um:.0f}, {rotation_params.y_start_um:.0f}, {rotation_params.z_start_um:.0f}" + assert position_string in second_upsert_data[29] + third_upsert_data = upsert_calls[2].args[0] + assert third_upsert_data[24] > 0 # resolution + assert third_upsert_data[52] > 0 # beam size + fourth_upsert_data = upsert_calls[3].args[0] + assert fourth_upsert_data[9] # timestamp + assert fourth_upsert_data[10] == "DataCollection Successful" diff --git a/tests/unit_tests/hyperion/experiment_plans/test_oav_snapshot_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_oav_snapshot_plan.py new file mode 100644 index 000000000..00c2e79dd --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_oav_snapshot_plan.py @@ -0,0 +1,142 @@ +import dataclasses +from datetime import datetime +from unittest.mock import patch + +import pytest +from bluesky.simulators import assert_message_and_return_remaining +from dodal.devices.aperturescatterguard import ApertureScatterguard +from dodal.devices.backlight import Backlight +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.oav.utils import ColorMode +from dodal.devices.smargon import Smargon + +from mx_bluesky.hyperion.experiment_plans.oav_snapshot_plan import ( + OAV_SNAPSHOT_SETUP_SHOT, + OavSnapshotComposite, + oav_snapshot_plan, +) +from 
mx_bluesky.hyperion.parameters.components import WithSnapshot +from mx_bluesky.hyperion.parameters.constants import DocDescriptorNames + +from ....conftest import raw_params_from_file + + +@pytest.fixture +def oav_snapshot_params(): + return WithSnapshot( + **raw_params_from_file( + "tests/test_data/parameter_json_files/test_oav_snapshot_params.json" + ) + ) + + +@dataclasses.dataclass +class CompositeImpl(OavSnapshotComposite): + smargon: Smargon + oav: OAV + aperture_scatterguard: ApertureScatterguard + backlight: Backlight + + +@pytest.fixture +def oav_snapshot_composite(smargon, oav, aperture_scatterguard, backlight): + oav.zoom_controller.fvst.sim_put("5.0x") + return CompositeImpl( + smargon=smargon, + oav=oav, + aperture_scatterguard=aperture_scatterguard, + backlight=backlight, + ) + + +@patch("mx_bluesky.hyperion.experiment_plans.oav_snapshot_plan.datetime", spec=datetime) +def test_oav_snapshot_plan_issues_rotations_and_generates_events( + mock_datetime, oav_snapshot_params, oav_snapshot_composite, sim_run_engine +): + mock_datetime.now.return_value = datetime.fromisoformat("2024-06-07T10:06:23") + msgs = sim_run_engine.simulate_plan( + oav_snapshot_plan( + oav_snapshot_composite, + oav_snapshot_params, + OAVParameters(oav_config_json="tests/test_data/test_OAVCentring.json"), + ) + ) + + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "oav_cam_color_mode" + and msg.args[0] == ColorMode.RGB1, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "oav_cam_acquire_period" + and msg.args[0] == 0.05, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "oav_cam_acquire_time" + and msg.args[0] == 0.075, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "oav_cam_gain" + and msg.args[0] == 1, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "oav_zoom_controller" + and msg.args[0] == "5.0x", + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "oav_snapshot_directory" + and msg.args[0] == "/tmp/my_snapshots", + ) + for expected in [ + {"omega": 0, "filename": "100623_oav_snapshot_0"}, + {"omega": 90, "filename": "100623_oav_snapshot_90"}, + {"omega": 180, "filename": "100623_oav_snapshot_180"}, + {"omega": 270, "filename": "100623_oav_snapshot_270"}, + ]: + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "smargon-omega" + and msg.args[0] == expected["omega"] + and msg.kwargs["group"] == OAV_SNAPSHOT_SETUP_SHOT, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "oav_snapshot_filename" + and msg.args[0] == expected["filename"], + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "trigger" + and msg.obj.name == "oav_snapshot" + and msg.kwargs["group"] is None, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "wait" and msg.kwargs["group"] is None, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "create" + and msg.kwargs["name"] + == DocDescriptorNames.OAV_ROTATION_SNAPSHOT_TRIGGERED, + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "read" and 
msg.obj.name == "oav_snapshot" + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "save" + ) diff --git a/tests/unit_tests/hyperion/experiment_plans/test_optimise_attenuation_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_optimise_attenuation_plan.py new file mode 100644 index 000000000..8b3c14dac --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_optimise_attenuation_plan.py @@ -0,0 +1,386 @@ +import asyncio +from typing import Literal +from unittest.mock import MagicMock, patch + +import numpy as np +import pytest +from bluesky.run_engine import RunEngine +from dodal.beamlines import i03 +from ophyd.status import Status +from ophyd_async.core import set_mock_value +from ophyd_async.core.async_status import AsyncStatus + +from mx_bluesky.hyperion.experiment_plans import optimise_attenuation_plan +from mx_bluesky.hyperion.experiment_plans.optimise_attenuation_plan import ( + AttenuationOptimisationFailedException, + Direction, + OptimizeAttenuationComposite, + calculate_new_direction, + check_parameters, + deadtime_calc_new_transmission, + deadtime_optimisation, + is_counts_within_target, + is_deadtime_optimised, + total_counts_optimisation, +) +from mx_bluesky.hyperion.log import LOGGER + + +@pytest.fixture +def mock_emit(): + import logging + + test_handler = logging.Handler() + test_handler.emit = MagicMock() # type: ignore + LOGGER.addHandler(test_handler) + + yield test_handler.emit + + LOGGER.removeHandler(test_handler) + + +@pytest.fixture +def fake_composite() -> OptimizeAttenuationComposite: + sample_shutter = i03.sample_shutter( + fake_with_ophyd_sim=True, wait_for_connection=True + ) + xspress3mini = i03.xspress3mini(fake_with_ophyd_sim=True, wait_for_connection=True) + attenuator = i03.attenuator(fake_with_ophyd_sim=True, wait_for_connection=True) + + return OptimizeAttenuationComposite( + sample_shutter=sample_shutter, xspress3mini=xspress3mini, attenuator=attenuator + ) + + +def get_good_status(): + status = Status() + status.set_finished() + return status + + +@pytest.fixture +def fake_composite_mocked_sets(fake_composite: OptimizeAttenuationComposite): + with ( + patch.object( + fake_composite.xspress3mini, + "stage", + MagicMock(return_value=get_good_status()), + ), + patch.object( + fake_composite.sample_shutter, + "set", + MagicMock(return_value=get_good_status()), + ), + ): + yield fake_composite + + +def test_is_deadtime_optimised_returns_true_once_direction_is_flipped_and_deadtime_goes_back_above_threshold( + RE: RunEngine, +): + deadtime: float = 1 + direction = Direction.POSITIVE + for _ in range(5): + assert is_deadtime_optimised(deadtime, 0.5, 0.5, 1, Direction.POSITIVE) is False + direction = calculate_new_direction(direction, deadtime, 0.5) + deadtime -= 0.1 + assert direction == Direction.NEGATIVE + deadtime = 0.4 + assert is_deadtime_optimised(deadtime, 0.5, 0.5, 1, direction) is True + + +def test_is_deadtime_is_optimised_logs_warning_when_upper_transmission_limit_is_reached( + mock_emit: MagicMock, +): + is_deadtime_optimised(0.5, 0.4, 0.9, 0.9, Direction.POSITIVE) + latest_record = mock_emit.call_args.args[-1] + assert latest_record.levelname == "WARNING" + + +def test_total_counts_calc_new_transmission_raises_warning_on_high_transmission( + RE: RunEngine, + mock_emit: MagicMock, + fake_composite_mocked_sets: OptimizeAttenuationComposite, +): + set_mock_value( + fake_composite_mocked_sets.xspress3mini.dt_corrected_latest_mca[1], + np.array([1, 1, 1, 1, 1, 1]), + ) + RE( + 
total_counts_optimisation( + fake_composite_mocked_sets, + transmission=0.1, + low_roi=0, + high_roi=1, + lower_count_limit=0, + upper_count_limit=0.1, + target_count=1, + max_cycles=1, + upper_transmission_limit=0.1, + lower_transmission_limit=0, + ) + ) + + latest_record = mock_emit.call_args.args[-1] + assert latest_record.levelname == "WARNING" + + +@pytest.mark.parametrize( + "old_direction, deadtime, deadtime_threshold, new_direction", + [ + (Direction.POSITIVE, 0.1, 0.9, Direction.POSITIVE), + (Direction.NEGATIVE, 0.5, 0.4, Direction.NEGATIVE), + ], +) +def test_calculate_new_direction_gives_correct_value( + old_direction: Direction | Direction, + deadtime: float, + deadtime_threshold: float, + new_direction: Direction | Direction, +): + assert ( + calculate_new_direction(old_direction, deadtime, deadtime_threshold) + == new_direction + ) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.optimise_attenuation_plan.do_device_optimise_iteration", + autospec=True, +) +def test_deadtime_optimisation_calculates_deadtime_correctly( + mock_do_device_optimise_iteration, + RE: RunEngine, + fake_composite: OptimizeAttenuationComposite, +): + set_mock_value(fake_composite.xspress3mini.channels[1].total_time, 100) + set_mock_value(fake_composite.xspress3mini.channels[1].reset_ticks, 101) + + with patch( + "mx_bluesky.hyperion.experiment_plans.optimise_attenuation_plan.is_deadtime_optimised", + autospec=True, + ) as mock_is_deadtime_optimised: + RE( + deadtime_optimisation( + fake_composite, + 0.5, + 2, + 0.01, + 1, + 0.1, + 1e-6, + ) + ) + mock_is_deadtime_optimised.assert_called_with( + 0.99, 0.01, 0.5, 0.1, Direction.POSITIVE + ) + + +@pytest.mark.parametrize( + "target, upper_limit, lower_limit, default_high_roi, default_low_roi,initial_transmission,upper_transmission,lower_transmission", + [ + (100, 90, 110, 1, 0, 0.5, 1, 0), + (50, 100, 20, 10, 20, 0.5, 1, 0), + (100, 100, 101, 10, 1, 0.5, 1, 0), + (10, 100, 0, 2, 1, 0.5, 0, 1), + (10, 100, 0, 2, 1, 0.5, 0.4, 0.1), + ], +) +def test_check_parameters_fail_on_out_of_range_parameters( + target: Literal[100] | Literal[50] | Literal[10], + upper_limit: Literal[90] | Literal[100], + lower_limit: Literal[110] | Literal[20] | Literal[101] | Literal[0], + default_high_roi: Literal[1] | Literal[10] | Literal[2], + default_low_roi: Literal[0] | Literal[20] | Literal[1], + initial_transmission: float, + upper_transmission: float | Literal[1] | Literal[0], + lower_transmission: float | Literal[0] | Literal[1], +): + with pytest.raises(ValueError): + check_parameters( + target, + upper_limit, + lower_limit, + default_high_roi, + default_low_roi, + initial_transmission, + upper_transmission, + lower_transmission, + ) + + +def test_check_parameters_runs_on_correct_params(): + assert check_parameters(10, 100, 0, 2, 1, 0.5, 1, 0) is None + + +@pytest.mark.parametrize( + "total_count, lower_limit, upper_limit", + [(100, 99, 100), (100, 100, 100), (50, 25, 1000)], +) +def test_is_counts_within_target_is_true( + total_count: Literal[100] | Literal[50], + lower_limit: Literal[99] | Literal[100] | Literal[25], + upper_limit: Literal[100] | Literal[1000], +): + assert is_counts_within_target(total_count, lower_limit, upper_limit) is True + + +@pytest.mark.parametrize( + "total_count, lower_limit, upper_limit", + [(100, 101, 101), (0, 1, 2), (1000, 2000, 3000)], +) +def test_is_counts_within_target_is_false( + total_count: Literal[100] | Literal[0] | Literal[1000], + lower_limit: Literal[101] | Literal[1] | Literal[2000], + upper_limit: Literal[101] | Literal[2] 
| Literal[3000], +): + assert is_counts_within_target(total_count, lower_limit, upper_limit) is False + + +def test_total_count_exception_raised_after_max_cycles_reached( + RE: RunEngine, fake_composite_mocked_sets: OptimizeAttenuationComposite +): + optimise_attenuation_plan.is_counts_within_target = MagicMock(return_value=False) + set_mock_value( + fake_composite_mocked_sets.xspress3mini.dt_corrected_latest_mca[1], + np.array([1, 1, 1, 1, 1, 1]), + ) + with pytest.raises(AttenuationOptimisationFailedException): + RE( + total_counts_optimisation( + fake_composite_mocked_sets, 1, 0, 10, 0, 5, 2, 1, 0, 0 + ) + ) + + +@pytest.mark.parametrize( + "direction, transmission, increment, upper_limit, lower_limit, new_transmission", + [ + (Direction.POSITIVE, 0.5, 2, 0.9, 1e-6, 0.9), + (Direction.POSITIVE, 0.1, 2, 0.9, 1e-6, 0.2), + (Direction.NEGATIVE, 0.8, 2, 0.9, 1e-6, 0.4), + ], +) +def test_deadtime_calc_new_transmission_gets_correct_value( + direction: Direction, + transmission: float, + increment: Literal[2], + upper_limit: float, + lower_limit: float, + new_transmission: float, +): + assert ( + deadtime_calc_new_transmission( + direction, transmission, increment, upper_limit, lower_limit + ) + == new_transmission + ) + + +def test_deadtime_calc_new_transmission_raises_error_on_low_transmission(): + with pytest.raises(AttenuationOptimisationFailedException): + deadtime_calc_new_transmission(Direction.NEGATIVE, 1e-6, 2, 1, 1e-6) + + +def test_total_count_calc_new_transmission_raises_error_on_low_transmission( + RE: RunEngine, fake_composite_mocked_sets: OptimizeAttenuationComposite +): + set_mock_value( + fake_composite_mocked_sets.xspress3mini.dt_corrected_latest_mca[1], + np.array([1, 1, 1, 1, 1, 1]), + ) + with pytest.raises(AttenuationOptimisationFailedException): + RE( + total_counts_optimisation( + fake_composite_mocked_sets, + 1e-6, + 0, + 1, + 10, + 20, + 1, + 1, + 0.5, + 0.1, + ) + ) + + +def test_total_counts_gets_within_target( + RE: RunEngine, + fake_composite_mocked_sets: OptimizeAttenuationComposite, +): + # For simplicity we just increase the data array each iteration.
In reality it's the transmission value that affects the array + def update_data(value): + nonlocal iteration + iteration += 1 + set_mock_value( + fake_composite_mocked_sets.xspress3mini.dt_corrected_latest_mca[1], + np.array(([50, 50, 50, 50, 50]) * iteration), + ) + return AsyncStatus(asyncio.sleep(0)) + + fake_composite_mocked_sets.attenuator.set = update_data + iteration = 0 + + RE( + total_counts_optimisation( + fake_composite_mocked_sets, + transmission=1, + low_roi=0, + high_roi=4, + lower_count_limit=1000, + upper_count_limit=2000, + target_count=1500, + max_cycles=10, + upper_transmission_limit=1, + lower_transmission_limit=0, + ) + ) + + +@pytest.mark.parametrize( + "optimisation_type", + [("total_counts"), ("deadtime")], +) +@patch( + "mx_bluesky.hyperion.experiment_plans.optimise_attenuation_plan.total_counts_optimisation", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.optimise_attenuation_plan.deadtime_optimisation", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.optimise_attenuation_plan.check_parameters", + autospec=True, +) +def test_optimisation_attenuation_plan_runs_correct_functions( + mock_check_parameters, + mock_deadtime_optimisation, + mock_total_counts_optimisation, + optimisation_type: Literal["total_counts"] | Literal["deadtime"], + RE: RunEngine, + fake_composite: OptimizeAttenuationComposite, +): + fake_composite.attenuator.set = MagicMock(return_value=get_good_status()) + fake_composite.xspress3mini.acquire_time.set = MagicMock( + return_value=get_good_status() + ) + + RE( + optimise_attenuation_plan.optimise_attenuation_plan( + fake_composite, + optimisation_type=optimisation_type, + ) + ) + + if optimisation_type == "total_counts": + mock_deadtime_optimisation.assert_not_called() + mock_total_counts_optimisation.assert_called_once() + else: + mock_total_counts_optimisation.assert_not_called() + mock_deadtime_optimisation.assert_called_once() + fake_composite.attenuator.set.assert_called_once() + mock_check_parameters.assert_called_once() + fake_composite.xspress3mini.acquire_time.set.assert_called_once() diff --git a/tests/unit_tests/hyperion/experiment_plans/test_pin_centre_then_xray_centre_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_pin_centre_then_xray_centre_plan.py new file mode 100644 index 000000000..000a2f0c3 --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_pin_centre_then_xray_centre_plan.py @@ -0,0 +1,191 @@ +from unittest.mock import MagicMock, patch + +import pytest +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator, assert_message_and_return_remaining +from bluesky.utils import Msg +from dodal.devices.detector.detector_motion import ShutterState +from dodal.devices.synchrotron import SynchrotronMode + +from mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan import ( + create_parameters_for_grid_detection, + pin_centre_then_xray_centre_plan, + pin_tip_centre_then_xray_centre, +) +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import PinTipCentreThenXrayCentre + +from ....conftest import raw_params_from_file + + +@pytest.fixture +def test_pin_centre_then_xray_centre_params(): + params = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_pin_centre_then_xray_centre_parameters.json" + ) + return PinTipCentreThenXrayCentre(**params) + + +def test_when_create_parameters_for_grid_detection_then_parameters_created( + 
test_pin_centre_then_xray_centre_params: PinTipCentreThenXrayCentre, +): + grid_detect_params = create_parameters_for_grid_detection( + test_pin_centre_then_xray_centre_params + ) + + assert grid_detect_params.exposure_time_s == 0.1 + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan.pin_tip_centre_plan", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan.detect_grid_and_do_gridscan", + autospec=True, +) +def test_when_pin_centre_xray_centre_called_then_plan_runs_correctly( + mock_detect_and_do_gridscan: MagicMock, + mock_pin_tip_centre: MagicMock, + test_pin_centre_then_xray_centre_params: PinTipCentreThenXrayCentre, + test_config_files, +): + RE = RunEngine() + RE( + pin_centre_then_xray_centre_plan( + MagicMock(), test_pin_centre_then_xray_centre_params, test_config_files + ) + ) + + mock_detect_and_do_gridscan.assert_called_once() + mock_pin_tip_centre.assert_called_once() + + +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.GridDetectionCallback", +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan.pin_tip_centre_plan", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.grid_detect_then_xray_centre_plan.grid_detection_plan", + autospec=True, +) +def test_when_pin_centre_xray_centre_called_then_detector_positioned( + mock_grid_detect: MagicMock, + mock_pin_tip_centre: MagicMock, + mock_grid_callback: MagicMock, + test_pin_centre_then_xray_centre_params: PinTipCentreThenXrayCentre, + simple_beamline, + test_config_files, + sim_run_engine: RunEngineSimulator, +): + mock_grid_callback.return_value.get_grid_parameters.return_value = { + "transmission_frac": 1.0, + "exposure_time_s": 0, + "x_start_um": 0, + "y_start_um": 0, + "y2_start_um": 0, + "z_start_um": 0, + "z2_start_um": 0, + "x_steps": 10, + "y_steps": 10, + "z_steps": 10, + "x_step_size_um": 0.1, + "y_step_size_um": 0.1, + "z_step_size_um": 0.1, + } + + sim_run_engine.add_handler_for_callback_subscribes() + + sim_run_engine.add_handler( + "read", + lambda msg_: {"values": {"value": SynchrotronMode.SHUTDOWN}}, + "synchrotron-synchrotron_mode", + ) + + def add_handlers_to_simulate_detector_motion(msg: Msg): + sim_run_engine.add_handler( + "read", + lambda msg_: {"values": {"value": int(ShutterState.OPEN)}}, + "detector_motion_shutter", + ) + sim_run_engine.add_handler( + "read", + lambda msg_: {"values": {"value": 1}}, + "detector_motion_z_motor_done_move", + ) + + sim_run_engine.add_wait_handler( + add_handlers_to_simulate_detector_motion, CONST.WAIT.GRID_READY_FOR_DC + ) + + messages = sim_run_engine.simulate_plan( + pin_tip_centre_then_xray_centre( + simple_beamline, + test_pin_centre_then_xray_centre_params, + test_config_files["oav_config_json"], + ), + ) + + messages = assert_message_and_return_remaining( + messages, lambda msg: msg.obj is simple_beamline.detector_motion.z + ) + assert messages[0].args[0] == 100 + assert messages[0].kwargs["group"] == CONST.WAIT.GRID_READY_FOR_DC + assert messages[1].obj is simple_beamline.detector_motion.shutter + assert messages[1].args[0] == ShutterState.OPEN + assert messages[1].kwargs["group"] == CONST.WAIT.GRID_READY_FOR_DC + messages = assert_message_and_return_remaining( + messages[2:], + lambda msg: msg.command == "wait" + and msg.kwargs["group"] == CONST.WAIT.GRID_READY_FOR_DC, + ) + assert_message_and_return_remaining( + messages[2:], + lambda msg: msg.command == "open_run" + and msg.kwargs["subplan_name"] == 
"do_fgs", + ) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan.pin_tip_centre_plan", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_centre_then_xray_centre_plan.detect_grid_and_do_gridscan", + autospec=True, +) +def test_pin_centre_then_xray_centre_plan_activates_ispyb_callback_before_pin_tip_centre_plan( + mock_detect_grid_and_do_gridscan, + mock_pin_tip_centre_plan, + sim_run_engine: RunEngineSimulator, + test_pin_centre_then_xray_centre_params: PinTipCentreThenXrayCentre, + test_config_files, +): + mock_detect_grid_and_do_gridscan.return_value = iter( + [Msg("detect_grid_and_do_gridscan")] + ) + mock_pin_tip_centre_plan.return_value = iter([Msg("pin_tip_centre_plan")]) + + msgs = sim_run_engine.simulate_plan( + pin_centre_then_xray_centre_plan( + MagicMock(), + test_pin_centre_then_xray_centre_params, + test_config_files["oav_config_json"], + ) + ) + + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "open_run" + and "GridscanISPyBCallback" in msg.kwargs["activate_callbacks"], + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "pin_tip_centre_plan" + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "detect_grid_and_do_gridscan" + ) + assert_message_and_return_remaining(msgs, lambda msg: msg.command == "close_run") diff --git a/tests/unit_tests/hyperion/experiment_plans/test_pin_tip_centring.py b/tests/unit_tests/hyperion/experiment_plans/test_pin_tip_centring.py new file mode 100644 index 000000000..41fc4ec2a --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_pin_tip_centring.py @@ -0,0 +1,343 @@ +from functools import partial +from unittest.mock import AsyncMock, MagicMock, patch + +import numpy as np +import pytest +from bluesky.plan_stubs import null +from bluesky.run_engine import RunEngine, RunEngineResult +from dodal.devices.oav.oav_detector import OAV, OAVConfigParams +from dodal.devices.oav.pin_image_recognition import PinTipDetection +from dodal.devices.oav.pin_image_recognition.utils import SampleLocation +from dodal.devices.smargon import Smargon +from ophyd.sim import NullStatus +from ophyd_async.core import get_mock_put, set_mock_value + +from mx_bluesky.hyperion.device_setup_plans.smargon import ( + move_smargon_warn_on_out_of_range, +) +from mx_bluesky.hyperion.exceptions import WarningException +from mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan import ( + DEFAULT_STEP_SIZE, + PinTipCentringComposite, + move_pin_into_view, + pin_tip_centre_plan, + trigger_and_return_pin_tip, +) + + +def get_fake_pin_values_generator(x, y): + yield from null() + return x, y + + +FAKE_EDGE_ARRAYS = np.ndarray([1, 2, 3]), np.ndarray([3, 4, 5]) + + +@pytest.fixture +def mock_pin_tip(pin_tip: PinTipDetection): + pin_tip._get_tip_and_edge_data = AsyncMock(return_value=pin_tip.INVALID_POSITION) + return pin_tip + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + new=MagicMock(), +) +async def test_given_the_pin_tip_is_already_in_view_when_get_tip_into_view_then_tip_returned_and_smargon_not_moved( + smargon: Smargon, oav: OAV, RE: RunEngine, mock_pin_tip: PinTipDetection +): + set_mock_value(smargon.x.user_readback, 0) + await mock_pin_tip.triggered_tip._backend.put((100, 200)) # type: ignore + + mock_pin_tip.trigger = MagicMock(return_value=NullStatus()) + + result = RE(move_pin_into_view(mock_pin_tip, smargon)) + + mock_pin_tip.trigger.assert_called_once() + assert 
await smargon.x.user_readback.get_value() == 0 + assert isinstance(result, RunEngineResult) + assert result.plan_result == (100, 200) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + new=MagicMock(), +) +async def test_given_no_tip_found_but_will_be_found_when_get_tip_into_view_then_smargon_moved_positive_and_tip_returned( + smargon: Smargon, oav: OAV, RE: RunEngine, mock_pin_tip: PinTipDetection +): + set_mock_value(mock_pin_tip.validity_timeout, 0.015) + set_mock_value(smargon.x.user_readback, 0) + + def set_pin_tip_when_x_moved(f, *args, **kwargs): + mock_pin_tip._get_tip_and_edge_data.return_value = SampleLocation( # type: ignore + 100, 200, *FAKE_EDGE_ARRAYS + ) + return f(*args, **kwargs) + + x_user_setpoint = get_mock_put(smargon.x.user_setpoint) + x_user_setpoint.side_effect = partial( + set_pin_tip_when_x_moved, x_user_setpoint.side_effect + ) + + result = RE(move_pin_into_view(mock_pin_tip, smargon)) + + assert await smargon.x.user_readback.get_value() == DEFAULT_STEP_SIZE + assert isinstance(result, RunEngineResult) + assert result.plan_result == (100, 200) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + new=MagicMock(), +) +async def test_given_tip_at_zero_but_will_be_found_when_get_tip_into_view_then_smargon_moved_negative_and_tip_returned( + smargon: Smargon, oav: OAV, RE: RunEngine, mock_pin_tip: PinTipDetection +): + mock_pin_tip._get_tip_and_edge_data.return_value = SampleLocation( # type: ignore + 0, 100, *FAKE_EDGE_ARRAYS + ) + set_mock_value(mock_pin_tip.validity_timeout, 0.15) + + set_mock_value(smargon.x.user_readback, 0) + + def set_pin_tip_when_x_moved(f, *args, **kwargs): + mock_pin_tip._get_tip_and_edge_data.return_value = SampleLocation( # type: ignore + 100, 200, *FAKE_EDGE_ARRAYS + ) + return f(*args, **kwargs) + + x_user_setpoint = get_mock_put(smargon.x.user_setpoint) + x_user_setpoint.side_effect = partial( + set_pin_tip_when_x_moved, x_user_setpoint.side_effect + ) + + result = RE(move_pin_into_view(mock_pin_tip, smargon)) + + assert await smargon.x.user_readback.get_value() == -DEFAULT_STEP_SIZE + assert result.plan_result == (100, 200) # type: ignore + + +async def test_trigger_and_return_pin_tip_works_for_AD_pin_tip_detection( + oav: OAV, RE: RunEngine, mock_pin_tip: PinTipDetection +): + mock_pin_tip._get_tip_and_edge_data.return_value = SampleLocation( # type: ignore + 200, 100, *FAKE_EDGE_ARRAYS + ) + set_mock_value(mock_pin_tip.validity_timeout, 0.15) + re_result = RE(trigger_and_return_pin_tip(mock_pin_tip)) + assert re_result.plan_result == (200, 100) # type: ignore + + +def test_trigger_and_return_pin_tip_works_for_ophyd_pin_tip_detection( + ophyd_pin_tip_detection: PinTipDetection, RE: RunEngine +): + mock_trigger_result = SampleLocation(100, 200, np.array([]), np.array([])) + ophyd_pin_tip_detection._get_tip_and_edge_data = AsyncMock( + return_value=mock_trigger_result + ) + re_result = RE(trigger_and_return_pin_tip(ophyd_pin_tip_detection)) + assert re_result.plan_result == (100, 200) # type: ignore + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.trigger_and_return_pin_tip" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + new=MagicMock(), +) +async def test_pin_tip_starting_near_negative_edge_doesnt_exceed_limit( + mock_trigger_and_return_tip: MagicMock, + smargon: Smargon, + oav: OAV, + RE: RunEngine, + pin_tip: PinTipDetection, +): + mock_trigger_and_return_tip.side_effect = [ + 
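+ # The mocked detection reports the tip at x-pixel 0 (right at the image edge) on both triggers, so the plan keeps stepping the smargon negative; starting from -1.8 it should end up clipped at the -2 limit asserted below before giving up.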
get_fake_pin_values_generator(0, 100), + get_fake_pin_values_generator(0, 100), + ] + + set_mock_value(smargon.x.user_setpoint, -1.8) + set_mock_value(smargon.x.user_readback, -1.8) + + with pytest.raises(WarningException): + RE(move_pin_into_view(pin_tip, smargon, max_steps=1)) + + assert await smargon.x.user_readback.get_value() == -2 + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.trigger_and_return_pin_tip" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + new=MagicMock(), +) +async def test_pin_tip_starting_near_positive_edge_doesnt_exceed_limit( + mock_trigger_and_return_pin_tip: MagicMock, + smargon: Smargon, + oav: OAV, + RE: RunEngine, + pin_tip: PinTipDetection, +): + mock_trigger_and_return_pin_tip.side_effect = [ + get_fake_pin_values_generator(None, None), + get_fake_pin_values_generator(None, None), + ] + set_mock_value(smargon.x.user_setpoint, 1.8) + set_mock_value(smargon.x.user_readback, 1.8) + + with pytest.raises(WarningException): + RE(move_pin_into_view(pin_tip, smargon, max_steps=1)) + + assert await smargon.x.user_readback.get_value() == 2 + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + new=MagicMock(), +) +async def test_given_no_tip_found_ever_when_get_tip_into_view_then_smargon_moved_positive_and_exception_thrown( + smargon: Smargon, oav: OAV, RE: RunEngine, pin_tip: PinTipDetection +): + set_mock_value(pin_tip.triggered_tip, pin_tip.INVALID_POSITION) + set_mock_value(pin_tip.validity_timeout, 0.01) + + set_mock_value(smargon.x.user_readback, 0) + + with pytest.raises(WarningException): + RE(move_pin_into_view(pin_tip, smargon)) + + assert await smargon.x.user_readback.get_value() == 1 + + +def test_given_moving_out_of_range_when_move_with_warn_called_then_warning_exception( + RE: RunEngine, smargon: Smargon +): + set_mock_value(smargon.x.high_limit_travel, 10) + + with pytest.raises(WarningException): + RE(move_smargon_warn_on_out_of_range(smargon, (100, 0, 0))) + + +def return_pixel(pixel, *args): + yield from null() + return pixel + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.wait_for_tip_to_be_found", + new=partial(return_pixel, (200, 200)), +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.get_move_required_so_that_beam_is_at_pixel", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.move_pin_into_view", + new=partial(return_pixel, (100, 100)), +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.pre_centring_setup_oav", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.move_smargon_warn_on_out_of_range", + autospec=True, +) +async def test_when_pin_tip_centre_plan_called_then_expected_plans_called( + move_smargon, + mock_sleep, + mock_setup_oav, + get_move: MagicMock, + smargon: Smargon, + test_config_files: dict[str, str], + RE: RunEngine, +): + set_mock_value(smargon.omega.user_readback, 0) + mock_oav: OAV = MagicMock(spec=OAV) + mock_oav.parameters = OAVConfigParams( + test_config_files["zoom_params_file"], test_config_files["display_config"] + ) + mock_oav.parameters.micronsPerXPixel = 2.87 + mock_oav.parameters.micronsPerYPixel = 2.87 + composite = PinTipCentringComposite( + backlight=MagicMock(), + oav=mock_oav, + smargon=smargon, + pin_tip_detection=MagicMock(), + ) + 
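+ # The 50 um tip offset at 2.87 um per pixel is presumably rounded to ~17 px, hence the expected beam-centre pixels of (100 + 17, 100) below and, after the 90 degree omega rotation, (200 + 17, 200).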
RE(pin_tip_centre_plan(composite, 50, test_config_files["oav_config_json"])) + + assert mock_setup_oav.call_count == 1 + + assert len(get_move.call_args_list) == 2 + + args, _ = get_move.call_args_list[0] + assert args[1] == (117, 100) + + assert await smargon.omega.user_readback.get_value() == 90 + + args, _ = get_move.call_args_list[1] + assert args[1] == (217, 200) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.wait_for_tip_to_be_found", + new=partial(return_pixel, (200, 200)), +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.get_move_required_so_that_beam_is_at_pixel", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.move_pin_into_view", +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.pre_centring_setup_oav", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.bps.sleep", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.pin_tip_centring_plan.move_smargon_warn_on_out_of_range", + autospec=True, +) +def test_given_pin_tip_detect_using_ophyd_when_pin_tip_centre_plan_called_then_expected_plans_called( + move_smargon, + mock_sleep, + mock_setup_oav, + mock_move_into_view, + get_move: MagicMock, + smargon: Smargon, + test_config_files: dict[str, str], + RE: RunEngine, +): + set_mock_value(smargon.omega.user_readback, 0) + mock_oav: OAV = MagicMock(spec=OAV) + mock_oav.parameters = OAVConfigParams( + test_config_files["zoom_params_file"], test_config_files["display_config"] + ) + mock_oav.parameters.micronsPerXPixel = 2.87 + mock_oav.parameters.micronsPerYPixel = 2.87 + mock_ophyd_pin_tip_detection = MagicMock() + composite = PinTipCentringComposite( + backlight=MagicMock(), + oav=mock_oav, + smargon=smargon, + pin_tip_detection=mock_ophyd_pin_tip_detection, + ) + mock_move_into_view.side_effect = partial(return_pixel, (100, 100)) + RE(pin_tip_centre_plan(composite, 50, test_config_files["oav_config_json"])) + + mock_move_into_view.assert_called_once_with(mock_ophyd_pin_tip_detection, smargon) + + assert mock_setup_oav.call_count == 1 diff --git a/tests/unit_tests/hyperion/experiment_plans/test_rotation_scan_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_rotation_scan_plan.py new file mode 100644 index 000000000..9eaf992bf --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_rotation_scan_plan.py @@ -0,0 +1,580 @@ +from __future__ import annotations + +from itertools import takewhile +from typing import Any +from unittest.mock import MagicMock, call, patch + +import pytest +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator, assert_message_and_return_remaining +from dodal.devices.aperturescatterguard import AperturePosition, ApertureScatterguard +from dodal.devices.backlight import BacklightPosition +from dodal.devices.oav.oav_parameters import OAVParameters +from dodal.devices.smargon import Smargon +from dodal.devices.synchrotron import SynchrotronMode +from dodal.devices.zebra import Zebra +from ophyd_async.core import get_mock_put + +from mx_bluesky.hyperion.experiment_plans.oav_snapshot_plan import ( + OAV_SNAPSHOT_GROUP, + OAV_SNAPSHOT_SETUP_GROUP, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationMotionProfile, + RotationScanComposite, + calculate_motion_profile, + rotation_scan, + rotation_scan_plan, +) +from mx_bluesky.hyperion.parameters.constants import CONST, DocDescriptorNames +from 
mx_bluesky.hyperion.parameters.rotation import RotationScan + +from .conftest import fake_read + +TEST_OFFSET = 1 +TEST_SHUTTER_OPENING_DEGREES = 2.5 + + +def do_rotation_main_plan_for_tests( + run_eng: RunEngine, + expt_params: RotationScan, + devices: RotationScanComposite, + motion_values: RotationMotionProfile, +): + with patch( + "bluesky.preprocessors.__read_and_stash_a_motor", + fake_read, + ): + run_eng( + rotation_scan_plan(devices, expt_params, motion_values), + ) + + +@pytest.fixture +def run_full_rotation_plan( + RE: RunEngine, + test_rotation_params: RotationScan, + fake_create_rotation_devices: RotationScanComposite, + oav_parameters_for_rotation: OAVParameters, +) -> RotationScanComposite: + with patch( + "bluesky.preprocessors.__read_and_stash_a_motor", + fake_read, + ): + RE( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ), + ) + return fake_create_rotation_devices + + +@pytest.fixture +def motion_values(test_rotation_params: RotationScan): + return calculate_motion_profile( + test_rotation_params, + 0.005, # time for acceleration + 222, + ) + + +def setup_and_run_rotation_plan_for_tests( + RE: RunEngine, + test_params: RotationScan, + fake_create_rotation_devices: RotationScanComposite, + motion_values, +): + with patch("bluesky.plan_stubs.wait", autospec=True): + do_rotation_main_plan_for_tests( + RE, test_params, fake_create_rotation_devices, motion_values + ) + + return { + "RE_with_subs": RE, + "test_rotation_params": test_params, + "smargon": fake_create_rotation_devices.smargon, + "zebra": fake_create_rotation_devices.zebra, + } + + +@pytest.fixture +def setup_and_run_rotation_plan_for_tests_standard( + RE: RunEngine, + test_rotation_params: RotationScan, + fake_create_rotation_devices: RotationScanComposite, + motion_values: RotationMotionProfile, +): + return setup_and_run_rotation_plan_for_tests( + RE, test_rotation_params, fake_create_rotation_devices, motion_values + ) + + +@pytest.fixture +def setup_and_run_rotation_plan_for_tests_nomove( + RE: RunEngine, + test_rotation_params_nomove: RotationScan, + fake_create_rotation_devices: RotationScanComposite, + motion_values: RotationMotionProfile, +): + return setup_and_run_rotation_plan_for_tests( + RE, test_rotation_params_nomove, fake_create_rotation_devices, motion_values + ) + + +def test_rotation_scan_calculations(test_rotation_params: RotationScan): + test_rotation_params.exposure_time_s = 0.2 + test_rotation_params.omega_start_deg = 10 + + motion_values = calculate_motion_profile( + test_rotation_params, + 0.005, # time for acceleration + 224, + ) + + assert motion_values.direction == "Negative" + assert motion_values.start_scan_deg == 10 + + assert motion_values.speed_for_rotation_deg_s == 0.5 # 0.1 deg per 0.2 sec + assert motion_values.shutter_time_s == 0.6 + assert motion_values.shutter_opening_deg == 0.3 # distance moved in 0.6 s + + # 1.5 * distance moved in time for accel (fudge) + assert motion_values.acceleration_offset_deg == 0.00375 + assert motion_values.start_motion_deg == 10.00375 + + assert motion_values.total_exposure_s == 360 + assert motion_values.scan_width_deg == 180 + assert motion_values.distance_to_move_deg == -180.3075 + + +@patch( + "dodal.common.beamlines.beamline_utils.active_device_is_same_type", + lambda a, b: True, +) +@patch( + "mx_bluesky.hyperion.experiment_plans.rotation_scan_plan.rotation_scan_plan", + autospec=True, +) +def test_rotation_scan( + plan: MagicMock, + RE: RunEngine, + test_rotation_params: 
RotationScan, + fake_create_rotation_devices: RotationScanComposite, + oav_parameters_for_rotation: OAVParameters, +): + composite = fake_create_rotation_devices + RE(rotation_scan(composite, test_rotation_params, oav_parameters_for_rotation)) + + composite.eiger.stage.assert_called() # type: ignore + composite.eiger.unstage.assert_called() # type: ignore + + +def test_rotation_plan_runs( + setup_and_run_rotation_plan_for_tests_standard: dict[str, Any], +) -> None: + RE: RunEngine = setup_and_run_rotation_plan_for_tests_standard["RE_with_subs"] + assert RE._exit_status == "success" + + +async def test_rotation_plan_zebra_settings( + setup_and_run_rotation_plan_for_tests_standard: dict[str, Any], +) -> None: + zebra: Zebra = setup_and_run_rotation_plan_for_tests_standard["zebra"] + params: RotationScan = setup_and_run_rotation_plan_for_tests_standard[ + "test_rotation_params" + ] + + assert await zebra.pc.gate_start.get_value() == params.omega_start_deg + assert await zebra.pc.pulse_start.get_value() == params.shutter_opening_time_s + + +async def test_full_rotation_plan_smargon_settings( + run_full_rotation_plan: RotationScanComposite, + test_rotation_params: RotationScan, +) -> None: + smargon: Smargon = run_full_rotation_plan.smargon + params: RotationScan = test_rotation_params + + test_max_velocity = await smargon.omega.max_velocity.get_value() + + omega_set: MagicMock = get_mock_put(smargon.omega.user_setpoint) + omega_velocity_set: MagicMock = get_mock_put(smargon.omega.velocity) + rotation_speed = params.rotation_increment_deg / params.exposure_time_s + + assert await smargon.phi.user_readback.get_value() == params.phi_start_deg + assert await smargon.chi.user_readback.get_value() == params.chi_start_deg + assert await smargon.x.user_readback.get_value() == params.x_start_um / 1000 # type: ignore + assert await smargon.y.user_readback.get_value() == params.y_start_um / 1000 # type: ignore + assert await smargon.z.user_readback.get_value() == params.z_start_um / 1000 # type: ignore + assert ( + # 4 * snapshots, restore omega, 1 * rotation sweep + omega_set.call_count == 4 + 1 + 1 + ) + # 1 to max vel in outer plan, 1 to max vel in setup_oav_snapshot_plan, 1 set before rotation, 1 restore in cleanup plan + assert omega_velocity_set.call_count == 4 + assert omega_velocity_set.call_args_list == [ + call(test_max_velocity, wait=True, timeout=10), + call(test_max_velocity, wait=True, timeout=10), + call(rotation_speed, wait=True, timeout=10), + call(test_max_velocity, wait=True, timeout=10), + ] + + +async def test_rotation_plan_moves_aperture_correctly( + run_full_rotation_plan: RotationScanComposite, + test_rotation_params: RotationScan, +) -> None: + aperture_scatterguard: ApertureScatterguard = ( + run_full_rotation_plan.aperture_scatterguard + ) + assert ( + await aperture_scatterguard.get_current_aperture_position() + == aperture_scatterguard._loaded_positions[AperturePosition.SMALL] + ) + + +async def test_rotation_plan_smargon_doesnt_move_xyz_if_not_given_in_params( + setup_and_run_rotation_plan_for_tests_nomove: dict[str, Any], +) -> None: + smargon: Smargon = setup_and_run_rotation_plan_for_tests_nomove["smargon"] + params: RotationScan = setup_and_run_rotation_plan_for_tests_nomove[ + "test_rotation_params" + ] + assert params.phi_start_deg is None + assert params.chi_start_deg is None + assert params.x_start_um is None + assert params.y_start_um is None + assert params.z_start_um is None + for motor in [smargon.phi, smargon.chi, smargon.x, smargon.y, smargon.z]: + assert await 
motor.user_readback.get_value() == 0 + get_mock_put(motor.user_setpoint).assert_not_called() # type: ignore + + +@patch( + "mx_bluesky.hyperion.experiment_plans.rotation_scan_plan._cleanup_plan", + autospec=True, +) +@patch("bluesky.plan_stubs.wait", autospec=True) +def test_cleanup_happens( + bps_wait: MagicMock, + cleanup_plan: MagicMock, + RE: RunEngine, + test_rotation_params: RotationScan, + fake_create_rotation_devices: RotationScanComposite, + motion_values: RotationMotionProfile, + oav_parameters_for_rotation: OAVParameters, +): + class MyTestException(Exception): + pass + + failing_set = MagicMock( + side_effect=MyTestException("Experiment fails because this is a test") + ) + + with patch.object(fake_create_rotation_devices.smargon.omega, "set", failing_set): + # check main subplan part fails + with pytest.raises(MyTestException): + RE( + rotation_scan_plan( + fake_create_rotation_devices, test_rotation_params, motion_values + ) + ) + cleanup_plan.assert_not_called() + # check that failure is handled in composite plan + with pytest.raises(MyTestException) as exc: + RE( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ) + ) + assert "Experiment fails because this is a test" in exc.value.args[0] + cleanup_plan.assert_called_once() + + +def test_rotation_plan_reads_hardware( + RE: RunEngine, + fake_create_rotation_devices: RotationScanComposite, + test_rotation_params, + motion_values, + sim_run_engine_for_rotation: RunEngineSimulator, +): + _add_sim_handlers_for_normal_operation( + fake_create_rotation_devices, sim_run_engine_for_rotation + ) + msgs = sim_run_engine_for_rotation.simulate_plan( + rotation_scan_plan( + fake_create_rotation_devices, test_rotation_params, motion_values + ) + ) + + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "create" + and msg.kwargs["name"] == CONST.DESCRIPTORS.HARDWARE_READ_PRE, + ) + msgs_in_event = list(takewhile(lambda msg: msg.command != "save", msgs)) + assert_message_and_return_remaining( + msgs_in_event, lambda msg: msg.command == "read" and msg.obj.name == "smargon-x" + ) + assert_message_and_return_remaining( + msgs_in_event, lambda msg: msg.command == "read" and msg.obj.name == "smargon-y" + ) + assert_message_and_return_remaining( + msgs_in_event, lambda msg: msg.command == "read" and msg.obj.name == "smargon-z" + ) + + +def test_rotation_scan_initialises_detector_distance_shutter_and_tx_fraction( + sim_run_engine: RunEngineSimulator, + fake_create_rotation_devices: RotationScanComposite, + test_rotation_params: RotationScan, + oav_parameters_for_rotation: OAVParameters, +): + _add_sim_handlers_for_normal_operation(fake_create_rotation_devices, sim_run_engine) + + msgs = sim_run_engine.simulate_plan( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ) + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.args[0] == 1 + and msg.obj.name == "detector_motion-shutter" + and msg.kwargs["group"] == "setup_senv", + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.args[0] == test_rotation_params.detector_distance_mm + and msg.obj.name == "detector_motion-z" + and msg.kwargs["group"] == "setup_senv", + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "attenuator" + and msg.args[0] == test_rotation_params.transmission_frac + and 
msg.kwargs["group"] == "setup_senv", + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "attenuator" + and msg.args[0] == test_rotation_params.transmission_frac + and msg.kwargs["group"] == "setup_senv", + ) + assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "wait" and msg.kwargs["group"] == "setup_senv" + ) + + +def test_rotation_scan_moves_gonio_to_start_before_snapshots( + fake_create_rotation_devices: RotationScanComposite, + sim_run_engine: RunEngineSimulator, + test_rotation_params: RotationScan, + oav_parameters_for_rotation: OAVParameters, +): + _add_sim_handlers_for_normal_operation(fake_create_rotation_devices, sim_run_engine) + msgs = sim_run_engine.simulate_plan( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ) + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "wait" + and msg.kwargs["group"] == CONST.WAIT.MOVE_GONIO_TO_START, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "wait" + and msg.kwargs["group"] == OAV_SNAPSHOT_SETUP_GROUP, + ) + + +def test_rotation_scan_moves_aperture_in_backlight_out_after_snapshots_before_rotation( + fake_create_rotation_devices: RotationScanComposite, + sim_run_engine: RunEngineSimulator, + test_rotation_params: RotationScan, + oav_parameters_for_rotation: OAVParameters, +): + _add_sim_handlers_for_normal_operation(fake_create_rotation_devices, sim_run_engine) + msgs = sim_run_engine.simulate_plan( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ) + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "create" + and msg.kwargs["name"] == DocDescriptorNames.OAV_ROTATION_SNAPSHOT_TRIGGERED, + ) + msgs = assert_message_and_return_remaining(msgs, lambda msg: msg.command == "save") + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "aperture_scatterguard" + and msg.args[0] == AperturePosition.SMALL + and msg.kwargs["group"] == "setup_senv", + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "backlight" + and msg.args[0] == BacklightPosition.OUT + and msg.kwargs["group"] == "setup_senv", + ) + assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "wait" and msg.kwargs["group"] == "setup_senv" + ) + + +def test_rotation_scan_resets_omega_waits_for_sample_env_complete_after_snapshots_before_hw_read( + fake_create_rotation_devices: RotationScanComposite, + sim_run_engine: RunEngineSimulator, + test_rotation_params: RotationScan, + oav_parameters_for_rotation: OAVParameters, +): + _add_sim_handlers_for_normal_operation(fake_create_rotation_devices, sim_run_engine) + msgs = sim_run_engine.simulate_plan( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ) + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "create" + and msg.kwargs["name"] == DocDescriptorNames.OAV_ROTATION_SNAPSHOT_TRIGGERED, + ) + msgs = assert_message_and_return_remaining(msgs, lambda msg: msg.command == "save") + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "smargon-omega" + and msg.args[0] == test_rotation_params.omega_start_deg + and msg.kwargs["group"] == "move_to_rotation_start", + ) + 
msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "wait" + and msg.kwargs["group"] == "move_to_rotation_start", + ) + assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "wait" and msg.kwargs["group"] == "setup_senv" + ) + assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "wait" + and msg.kwargs["group"] == "move_to_rotation_start", + ) + assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "create" + and msg.kwargs["name"] == CONST.DESCRIPTORS.ZOCALO_HW_READ, + ) + + +def test_rotation_snapshot_setup_called_to_move_backlight_in_aperture_out_before_triggering( + fake_create_rotation_devices: RotationScanComposite, + sim_run_engine: RunEngineSimulator, + test_rotation_params: RotationScan, + oav_parameters_for_rotation: OAVParameters, +): + _add_sim_handlers_for_normal_operation(fake_create_rotation_devices, sim_run_engine) + msgs = sim_run_engine.simulate_plan( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ) + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "backlight" + and msg.args[0] == BacklightPosition.IN + and msg.kwargs["group"] == OAV_SNAPSHOT_SETUP_GROUP, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "set" + and msg.obj.name == "aperture_scatterguard" + and msg.args[0] == AperturePosition.ROBOT_LOAD + and msg.kwargs["group"] == OAV_SNAPSHOT_SETUP_GROUP, + ) + msgs = assert_message_and_return_remaining( + msgs, + lambda msg: msg.command == "wait" + and msg.kwargs["group"] == OAV_SNAPSHOT_SETUP_GROUP, + ) + msgs = assert_message_and_return_remaining( + msgs, lambda msg: msg.command == "trigger" and msg.obj.name == "oav_snapshot" + ) + + +def test_rotation_scan_skips_init_backlight_aperture_and_snapshots_if_snapshot_params_specified( + fake_create_rotation_devices: RotationScanComposite, + sim_run_engine: RunEngineSimulator, + test_rotation_params: RotationScan, + oav_parameters_for_rotation: OAVParameters, +): + _add_sim_handlers_for_normal_operation(fake_create_rotation_devices, sim_run_engine) + test_rotation_params.snapshot_omegas_deg = None + + msgs = sim_run_engine.simulate_plan( + rotation_scan( + fake_create_rotation_devices, + test_rotation_params, + oav_parameters_for_rotation, + ) + ) + assert not [ + msg for msg in msgs if msg.kwargs.get("group", None) == OAV_SNAPSHOT_SETUP_GROUP + ] + assert not [ + msg for msg in msgs if msg.kwargs.get("group", None) == OAV_SNAPSHOT_GROUP + ] + assert ( + len( + [ + msg + for msg in msgs + if msg.command == "set" + and msg.obj.name == "smargon-omega" + and msg.kwargs["group"] == "move_to_rotation_start" + ] + ) + == 1 + ) + + +def _add_sim_handlers_for_normal_operation( + fake_create_rotation_devices, sim_run_engine: RunEngineSimulator +): + sim_run_engine.add_handler( + "read", + lambda msg: {"values": {"value": SynchrotronMode.USER}}, + "synchrotron-synchrotron_mode", + ) + sim_run_engine.add_handler( + "read", + lambda msg: {"values": {"value": -1}}, + "synchrotron-top_up_start_countdown", + ) + sim_run_engine.add_handler( + "read", lambda msg: {"smargon-omega": {"value": -1}}, "smargon-omega" + ) diff --git a/tests/unit_tests/hyperion/experiment_plans/test_set_energy_plan.py b/tests/unit_tests/hyperion/experiment_plans/test_set_energy_plan.py new file mode 100644 index 000000000..541594972 --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_set_energy_plan.py 
@@ -0,0 +1,76 @@ +from unittest.mock import patch + +import pytest +from bluesky.simulators import assert_message_and_return_remaining +from bluesky.utils import Msg +from dodal.devices.xbpm_feedback import Pause + +from mx_bluesky.hyperion.experiment_plans.set_energy_plan import ( + SetEnergyComposite, + set_energy_plan, +) + + +@pytest.fixture() +def set_energy_composite( + attenuator, dcm, undulator_dcm, vfm, vfm_mirror_voltages, xbpm_feedback +): + composite = SetEnergyComposite( + vfm, + vfm_mirror_voltages, + dcm, + undulator_dcm, + xbpm_feedback, + attenuator, + ) + return composite + + +@patch( + "mx_bluesky.hyperion.experiment_plans.set_energy_plan.dcm_pitch_roll_mirror_adjuster.adjust_dcm_pitch_roll_vfm_from_lut", + return_value=iter([Msg("adjust_dcm_pitch_roll_vfm_from_lut")]), +) +def test_set_energy( + mock_dcm_pra, + sim_run_engine, + set_energy_composite, +): + messages = sim_run_engine.simulate_plan(set_energy_plan(11.1, set_energy_composite)) + messages = assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "set" + and msg.obj.name == "xbpm_feedback-pause_feedback" + and msg.args == (Pause.PAUSE,), + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "attenuator" + and msg.args == (0.1,), + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "undulator_dcm" + and msg.args == (11.1,) + and msg.kwargs["group"] == "UNDULATOR_GROUP", + ) + messages = assert_message_and_return_remaining( + messages[1:], lambda msg: msg.command == "adjust_dcm_pitch_roll_vfm_from_lut" + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "wait" and msg.kwargs["group"] == "UNDULATOR_GROUP", + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "xbpm_feedback-pause_feedback" + and msg.args == (Pause.RUN,), + ) + messages = assert_message_and_return_remaining( + messages[1:], + lambda msg: msg.command == "set" + and msg.obj.name == "attenuator" + and msg.args == (1.0,), + ) diff --git a/tests/unit_tests/hyperion/experiment_plans/test_wait_for_robot_load_then_centre.py b/tests/unit_tests/hyperion/experiment_plans/test_wait_for_robot_load_then_centre.py new file mode 100644 index 000000000..254caf554 --- /dev/null +++ b/tests/unit_tests/hyperion/experiment_plans/test_wait_for_robot_load_then_centre.py @@ -0,0 +1,493 @@ +from functools import partial +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest +from bluesky.run_engine import RunEngine +from bluesky.simulators import RunEngineSimulator, assert_message_and_return_remaining +from bluesky.utils import Msg +from dodal.devices.aperturescatterguard import AperturePosition +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.smargon import StubPosition +from dodal.devices.webcam import Webcam +from ophyd.sim import NullStatus +from ophyd_async.core import set_mock_value + +from mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan import ( + RobotLoadThenCentreComposite, + prepare_for_robot_load, + robot_load_then_centre, + take_robot_snapshots, +) +from mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback import ( + RobotLoadISPyBCallback, +) +from mx_bluesky.hyperion.parameters.gridscan import ( + PinTipCentreThenXrayCentre, + RobotLoadThenCentre, +) + +from ....conftest import 
raw_params_from_file + + +@pytest.fixture +def robot_load_composite( + smargon, dcm, robot, aperture_scatterguard, oav, webcam, thawer, lower_gonio, eiger +) -> RobotLoadThenCentreComposite: + composite: RobotLoadThenCentreComposite = MagicMock() + composite.smargon = smargon + composite.dcm = dcm + set_mock_value(composite.dcm.energy_in_kev.user_readback, 11.105) + composite.robot = robot + composite.aperture_scatterguard = aperture_scatterguard + composite.smargon.stub_offsets.set = MagicMock(return_value=NullStatus()) + composite.aperture_scatterguard.set = MagicMock(return_value=NullStatus()) + composite.oav = oav + composite.webcam = webcam + composite.lower_gonio = lower_gonio + composite.thawer = thawer + composite.eiger = eiger + return composite + + +@pytest.fixture +def robot_load_then_centre_params(): + params = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_robot_load_params.json" + ) + return RobotLoadThenCentre(**params) + + +@pytest.fixture +def robot_load_then_centre_params_no_energy(robot_load_then_centre_params): + robot_load_then_centre_params.demand_energy_ev = None + return robot_load_then_centre_params + + +def dummy_set_energy_plan(energy, composite): + return (yield Msg("set_energy_plan")) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([])), +) +def test_when_plan_run_then_centring_plan_run_with_expected_parameters( + mock_centring_plan: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params: RobotLoadThenCentre, +): + RE = RunEngine() + + RE(robot_load_then_centre(robot_load_composite, robot_load_then_centre_params)) + composite_passed = mock_centring_plan.call_args[0][0] + params_passed: PinTipCentreThenXrayCentre = mock_centring_plan.call_args[0][1] + + for name, value in vars(composite_passed).items(): + assert value == getattr(robot_load_composite, name) + + assert isinstance(params_passed, PinTipCentreThenXrayCentre) + assert params_passed.detector_params.expected_energy_ev == 11100 + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(side_effect=dummy_set_energy_plan), +) +def test_when_plan_run_with_requested_energy_specified_energy_change_executes( + mock_centring_plan: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params: RobotLoadThenCentre, + sim_run_engine: RunEngineSimulator, +): + sim_run_engine.add_handler( + "read", + lambda msg: {"dcm-energy_in_kev": {"value": 11.105}}, + "dcm-energy_in_kev", + ) + messages = sim_run_engine.simulate_plan( + robot_load_then_centre(robot_load_composite, robot_load_then_centre_params) + ) + assert_message_and_return_remaining( + messages, lambda msg: msg.command == "set_energy_plan" + ) + params_passed: PinTipCentreThenXrayCentre = mock_centring_plan.call_args[0][1] + assert params_passed.detector_params.expected_energy_ev == 11100 + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan", + MagicMock(), +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([Msg("set_energy_plan")])), +) +def 
test_robot_load_then_centre_doesnt_set_energy_if_not_specified_and_current_energy_set_on_eiger( + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params_no_energy: RobotLoadThenCentre, + sim_run_engine: RunEngineSimulator, +): + robot_load_composite.eiger.set_detector_parameters = MagicMock() + sim_run_engine.add_handler( + "read", + lambda msg: {"dcm-energy_in_kev": {"value": 11.105}}, + "dcm-energy_in_kev", + ) + messages = sim_run_engine.simulate_plan( + robot_load_then_centre( + robot_load_composite, + robot_load_then_centre_params_no_energy, + ) + ) + assert not any(msg for msg in messages if msg.command == "set_energy_plan") + det_params = robot_load_composite.eiger.set_detector_parameters.call_args[0][0] + assert det_params.expected_energy_ev == 11105 + + +def run_simulating_smargon_wait( + robot_load_then_centre_params, + robot_load_composite, + total_disabled_reads, + sim_run_engine: RunEngineSimulator, +): + num_of_reads = 0 + + def return_not_disabled_after_reads(_): + nonlocal num_of_reads + num_of_reads += 1 + return {"values": {"value": int(num_of_reads < total_disabled_reads)}} + + sim_run_engine.add_handler( + "read", + lambda msg: {"dcm-energy_in_kev": {"value": 11.105}}, + "dcm-energy_in_kev", + ) + sim_run_engine.add_handler( + "read", return_not_disabled_after_reads, "smargon-disabled" + ) + + return sim_run_engine.simulate_plan( + robot_load_then_centre(robot_load_composite, robot_load_then_centre_params) + ) + + +@pytest.mark.parametrize("total_disabled_reads", [5, 3, 14]) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([])), +) +def test_given_smargon_disabled_when_plan_run_then_waits_on_smargon( + mock_centring_plan: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params: RobotLoadThenCentre, + total_disabled_reads: int, + sim_run_engine, +): + messages = run_simulating_smargon_wait( + robot_load_then_centre_params, + robot_load_composite, + total_disabled_reads, + sim_run_engine, + ) + + mock_centring_plan.assert_called_once() + + sleep_messages = filter(lambda msg: msg.command == "sleep", messages) + read_disabled_messages = filter( + lambda msg: msg.command == "read" and msg.obj.name == "smargon-disabled", + messages, + ) + + assert len(list(sleep_messages)) == total_disabled_reads - 1 + assert len(list(read_disabled_messages)) == total_disabled_reads + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([])), +) +def test_given_smargon_disabled_for_longer_than_timeout_when_plan_run_then_throws_exception( + mock_centring_plan: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params: RobotLoadThenCentre, + sim_run_engine, +): + with pytest.raises(TimeoutError): + run_simulating_smargon_wait( + robot_load_then_centre_params, + robot_load_composite, + 1000, + sim_run_engine, + ) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([])), +) +def 
test_when_plan_run_then_detector_arm_started_before_wait_on_robot_load( + mock_centring_plan: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params: RobotLoadThenCentre, + sim_run_engine, +): + messages = run_simulating_smargon_wait( + robot_load_then_centre_params, + robot_load_composite, + 1, + sim_run_engine, + ) + + arm_detector_messages = filter( + lambda msg: msg.command == "set" and msg.obj.name == "eiger_do_arm", + messages, + ) + read_disabled_messages = filter( + lambda msg: msg.command == "read" and msg.obj.name == "smargon-disabled", + messages, + ) + + arm_detector_messages = list(arm_detector_messages) + assert len(arm_detector_messages) == 1 + + idx_of_arm_message = messages.index(arm_detector_messages[0]) + idx_of_first_read_disabled_message = messages.index(list(read_disabled_messages)[0]) + + assert idx_of_arm_message < idx_of_first_read_disabled_message + + +async def test_when_prepare_for_robot_load_called_then_moves_as_expected( + robot_load_composite: RobotLoadThenCentreComposite, +): + smargon = robot_load_composite.smargon + aperture_scatterguard = robot_load_composite.aperture_scatterguard + set_mock_value(smargon.x.user_readback, 10) + set_mock_value(smargon.z.user_readback, 5) + set_mock_value(smargon.omega.user_readback, 90) + + RE = RunEngine() + RE(prepare_for_robot_load(robot_load_composite)) + + assert await smargon.x.user_readback.get_value() == 0 + assert await smargon.z.user_readback.get_value() == 0 + assert await smargon.omega.user_readback.get_value() == 0 + + smargon.stub_offsets.set.assert_called_once_with(StubPosition.RESET_TO_ROBOT_LOAD) # type: ignore + aperture_scatterguard.set.assert_called_once_with(AperturePosition.ROBOT_LOAD) # type: ignore + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.end_load" +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.update_barcode_and_snapshots" +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.start_load" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([])), +) +def test_given_ispyb_callback_attached_when_robot_load_then_centre_plan_called_then_ispyb_deposited( + mock_centring_plan: MagicMock, + start_load: MagicMock, + update_barcode_and_snapshots: MagicMock, + end_load: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params: RobotLoadThenCentre, +): + robot_load_composite.oav.snapshot.last_saved_path.put("test_oav_snapshot") # type: ignore + set_mock_value(robot_load_composite.webcam.last_saved_path, "test_webcam_snapshot") + robot_load_composite.webcam.trigger = MagicMock(return_value=NullStatus()) + + RE = RunEngine() + RE.subscribe(RobotLoadISPyBCallback()) + + action_id = 1098 + start_load.return_value = action_id + + RE(robot_load_then_centre(robot_load_composite, robot_load_then_centre_params)) + + start_load.assert_called_once_with("cm31105", 4, 12345, 40, 3) + update_barcode_and_snapshots.assert_called_once_with( + action_id, "BARCODE", "test_webcam_snapshot", "test_oav_snapshot" + ) + end_load.assert_called_once_with(action_id, "success", "OK") + + +@patch("mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.datetime") +async def 
test_when_take_snapshots_called_then_filename_and_directory_set_and_device_triggered( + mock_datetime: MagicMock, oav: OAV, webcam: Webcam +): + TEST_DIRECTORY = "TEST" + + mock_datetime.now.return_value.strftime.return_value = "TIME" + + RE = RunEngine() + oav.snapshot.trigger = MagicMock(side_effect=oav.snapshot.trigger) + webcam.trigger = MagicMock(return_value=NullStatus()) + + RE(take_robot_snapshots(oav, webcam, Path(TEST_DIRECTORY))) + + oav.snapshot.trigger.assert_called_once() + assert oav.snapshot.filename.get() == "TIME_oav_snapshot_after_load" + assert oav.snapshot.directory.get() == TEST_DIRECTORY + + webcam.trigger.assert_called_once() + assert (await webcam.filename.get_value()) == "TIME_webcam_after_load" + assert (await webcam.directory.get_value()) == TEST_DIRECTORY + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan", + MagicMock(), +) +def test_given_lower_gonio_moved_when_robot_load_then_lower_gonio_moved_to_home_and_back( + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params_no_energy: RobotLoadThenCentre, + sim_run_engine: RunEngineSimulator, +): + initial_values = {"x": 0.11, "y": 0.12, "z": 0.13} + + def get_read(axis, msg): + return {f"lower_gonio-{axis}": {"value": initial_values[axis]}} + + for axis in initial_values.keys(): + sim_run_engine.add_handler( + "read", partial(get_read, axis), f"lower_gonio-{axis}" + ) + + messages = sim_run_engine.simulate_plan( + robot_load_then_centre( + robot_load_composite, + robot_load_then_centre_params_no_energy, + ) + ) + + for axis in initial_values.keys(): + messages = assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "set" + and msg.obj.name == f"lower_gonio-{axis}" + and msg.args == (0,), + ) + + for axis, initial in initial_values.items(): + messages = assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "set" + and msg.obj.name == f"lower_gonio-{axis}" + and msg.args == (initial,), + ) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([])), +) +def test_when_plan_run_then_lower_gonio_moved_before_robot_loads_and_back_after_smargon_enabled( + mock_centring_plan: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params_no_energy: RobotLoadThenCentre, + sim_run_engine: RunEngineSimulator, +): + initial_values = {"x": 0.11, "y": 0.12, "z": 0.13} + + def get_read(axis, msg): + return {f"lower_gonio-{axis}": {"value": initial_values[axis]}} + + for axis in initial_values.keys(): + sim_run_engine.add_handler( + "read", partial(get_read, axis), f"lower_gonio-{axis}" + ) + + messages = sim_run_engine.simulate_plan( + robot_load_then_centre( + robot_load_composite, + robot_load_then_centre_params_no_energy, + ) + ) + + assert_message_and_return_remaining( + messages, lambda msg: msg.command == "set" and msg.obj.name == "robot" + ) + + for axis in initial_values.keys(): + messages = assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "set" + and msg.obj.name == f"lower_gonio-{axis}" + and msg.args == (0,), + ) + + assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "read" and msg.obj.name == "smargon-disabled", + ) + + for axis, initial in initial_values.items(): + messages = 
assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "set" + and msg.obj.name == f"lower_gonio-{axis}" # noqa + and msg.args == (initial,), # noqa + ) + + +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.pin_centre_then_xray_centre_plan" +) +@patch( + "mx_bluesky.hyperion.experiment_plans.robot_load_then_centre_plan.set_energy_plan", + MagicMock(return_value=iter([])), +) +def test_when_plan_run_then_thawing_turned_on_for_expected_time( + mock_centring_plan: MagicMock, + robot_load_composite: RobotLoadThenCentreComposite, + robot_load_then_centre_params_no_energy: RobotLoadThenCentre, + sim_run_engine: RunEngineSimulator, +): + robot_load_then_centre_params_no_energy.thawing_time = (thaw_time := 50) + + sim_run_engine.add_handler( + "read", + lambda msg: {"dcm-energy_in_kev": {"value": 11.105}}, + "dcm-energy_in_kev", + ) + + messages = sim_run_engine.simulate_plan( + robot_load_then_centre( + robot_load_composite, + robot_load_then_centre_params_no_energy, + ) + ) + + assert_message_and_return_remaining( + messages, + lambda msg: msg.command == "set" + and msg.obj.name == "thawer-thaw_for_time_s" + and msg.args[0] == thaw_time, + ) diff --git a/tests/unit_tests/hyperion/external_interaction/__init__.py b/tests/unit_tests/hyperion/external_interaction/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/__init__.py b/tests/unit_tests/hyperion/external_interaction/callbacks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/conftest.py b/tests/unit_tests/hyperion/external_interaction/callbacks/conftest.py new file mode 100644 index 000000000..95c9bf4c8 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/conftest.py @@ -0,0 +1,260 @@ +import pytest +from dodal.devices.synchrotron import SynchrotronMode +from dodal.devices.zocalo.zocalo_results import ZOCALO_READING_PLAN_NAME +from event_model.documents import Event, EventDescriptor, RunStart, RunStop + +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan +from tests.conftest import create_dummy_scan_spec + +from .....conftest import default_raw_params, raw_params_from_file +from ...conftest import OavGridSnapshotTestEvents + + +def dummy_params(): + dummy_params = ThreeDGridScan(**default_raw_params()) + return dummy_params + + +def dummy_params_2d(): + raw_params = raw_params_from_file( + "tests/test_data/parameter_json_files/test_gridscan_param_defaults.json" + ) + raw_params["z_steps"] = 1 + return ThreeDGridScan(**raw_params) + + +@pytest.fixture +def test_rotation_start_outer_document(dummy_rotation_params): + return { + "uid": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "subplan_name": CONST.PLAN.ROTATION_OUTER, + "hyperion_parameters": dummy_rotation_params.json(), + } + + +class TestData(OavGridSnapshotTestEvents): + DUMMY_TIME_STRING: str = "1970-01-01 00:00:00" + GOOD_ISPYB_RUN_STATUS: str = "DataCollection Successful" + BAD_ISPYB_RUN_STATUS: str = "DataCollection Unsuccessful" + test_start_document: RunStart = { # type: ignore + "uid": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604299.6149616, + "versions": {"ophyd": "1.6.4.post76+g0895f9f", "bluesky": "1.8.3"}, + "scan_id": 1, + "plan_type": "generator", + "plan_name": CONST.PLAN.GRIDSCAN_OUTER, + "subplan_name": CONST.PLAN.GRIDSCAN_OUTER, + CONST.TRIGGER.ZOCALO: 
CONST.PLAN.DO_FGS, + "hyperion_parameters": dummy_params().json(), + } + test_gridscan3d_start_document: RunStart = { # type: ignore + "uid": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604299.6149616, + "versions": {"ophyd": "1.6.4.post76+g0895f9f", "bluesky": "1.8.3"}, + "scan_id": 1, + "plan_type": "generator", + "plan_name": "test", + "subplan_name": CONST.PLAN.GRID_DETECT_AND_DO_GRIDSCAN, + "hyperion_parameters": dummy_params().json(), + } + test_gridscan2d_start_document = { + "uid": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604299.6149616, + "versions": {"ophyd": "1.6.4.post76+g0895f9f", "bluesky": "1.8.3"}, + "scan_id": 1, + "plan_type": "generator", + "plan_name": "test", + "subplan_name": CONST.PLAN.GRID_DETECT_AND_DO_GRIDSCAN, + "hyperion_parameters": dummy_params_2d().json(), + } + test_rotation_start_main_document = { + "uid": "2093c941-ded1-42c4-ab74-ea99980fbbfd", + "subplan_name": CONST.PLAN.ROTATION_MAIN, + "zocalo_environment": "dev_artemis", + } + test_gridscan_outer_start_document = { + "uid": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604299.6149616, + "versions": {"ophyd": "1.6.4.post76+g0895f9f", "bluesky": "1.8.3"}, + "scan_id": 1, + "plan_type": "generator", + "plan_name": CONST.PLAN.GRIDSCAN_OUTER, + "subplan_name": CONST.PLAN.GRIDSCAN_OUTER, + "zocalo_environment": "dev_artemis", + CONST.TRIGGER.ZOCALO: CONST.PLAN.DO_FGS, + "hyperion_parameters": dummy_params().json(), + } + test_rotation_event_document_during_data_collection: Event = { + "descriptor": "bd45c2e5-2b85-4280-95d7-a9a15800a78b", + "time": 2666604299.928203, + "data": { + "aperture_scatterguard-selected_aperture": { + "name": "Medium", + "GDA_name": "MEDIUM", + "radius_microns": 50, + "location": (15, 16, 2, 18, 19), + }, + "attenuator-actual_transmission": 0.98, + "flux_flux_reading": 9.81, + "dcm-energy_in_kev": 11.105, + }, + "timestamps": {"det1": 1666604299.8220396, "det2": 1666604299.8235943}, + "seq_num": 1, + "uid": "2093c941-ded1-42c4-ab74-ea99980fbbfd", + "filled": {}, + } + test_rotation_stop_main_document: RunStop = { + "run_start": "2093c941-ded1-42c4-ab74-ea99980fbbfd", + "time": 1666604300.0310638, + "uid": "65b2bde5-5740-42d7-9047-e860e06fbe15", + "exit_status": "success", + "reason": "Test succeeded", + "num_events": {"fake_ispyb_params": 1, "primary": 1}, + } + test_run_gridscan_start_document: RunStart = { # type: ignore + "uid": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604299.6149616, + "versions": {"ophyd": "1.6.4.post76+g0895f9f", "bluesky": "1.8.3"}, + "scan_id": 1, + "plan_type": "generator", + "plan_name": CONST.PLAN.GRIDSCAN_AND_MOVE, + "subplan_name": CONST.PLAN.GRIDSCAN_MAIN, + } + test_do_fgs_start_document: RunStart = { # type: ignore + "uid": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604299.6149616, + "versions": {"ophyd": "1.6.4.post76+g0895f9f", "bluesky": "1.8.3"}, + "scan_id": 1, + "plan_type": "generator", + "plan_name": CONST.PLAN.GRIDSCAN_AND_MOVE, + "subplan_name": CONST.PLAN.DO_FGS, + "scan_points": create_dummy_scan_spec(10, 20, 30), + } + test_descriptor_document_oav_rotation_snapshot: EventDescriptor = { + "uid": "c7d698ce-6d49-4c56-967e-7d081f964573", + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "name": CONST.DESCRIPTORS.OAV_ROTATION_SNAPSHOT_TRIGGERED, + } # type: ignore + test_descriptor_document_pre_data_collection: EventDescriptor = { + "uid": "bd45c2e5-2b85-4280-95d7-a9a15800a78b", + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "name": CONST.DESCRIPTORS.HARDWARE_READ_PRE, + 
} # type: ignore + test_descriptor_document_during_data_collection: EventDescriptor = { + "uid": "bd45c2e5-2b85-4280-95d7-a9a15800a78b", + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "name": CONST.DESCRIPTORS.HARDWARE_READ_DURING, + } # type: ignore + test_descriptor_document_zocalo_hardware: EventDescriptor = { + "uid": "f082901b-7453-4150-8ae5-c5f98bb34406", + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "name": CONST.DESCRIPTORS.ZOCALO_HW_READ, + } # type: ignore + test_event_document_oav_rotation_snapshot: Event = { + "descriptor": "c7d698ce-6d49-4c56-967e-7d081f964573", + "time": 1666604299.828203, + "timestamps": {}, + "seq_num": 1, + "uid": "32d7c25c-c310-4292-ac78-36ce6509be3d", + "data": {"oav_snapshot_last_saved_path": "snapshot_0"}, + } + test_event_document_pre_data_collection: Event = { + "descriptor": "bd45c2e5-2b85-4280-95d7-a9a15800a78b", + "time": 1666604299.828203, + "data": { + "s4_slit_gaps_xgap": 0.1234, + "s4_slit_gaps_ygap": 0.2345, + "synchrotron-synchrotron_mode": SynchrotronMode.USER, + "undulator-current_gap": 1.234, + "smargon-x": 0.158435435, + "smargon-y": 0.023547354, + "smargon-z": 0.00345684712, + }, + "timestamps": {"det1": 1666604299.8220396, "det2": 1666604299.8235943}, + "seq_num": 1, + "uid": "29033ecf-e052-43dd-98af-c7cdd62e8173", + "filled": {}, + } + test_event_document_during_data_collection: Event = { + "descriptor": "bd45c2e5-2b85-4280-95d7-a9a15800a78b", + "time": 2666604299.928203, + "data": { + "aperture_scatterguard-selected_aperture": { + "name": "Medium", + "GDA_name": "MEDIUM", + "radius_microns": 50, + "location": (15, 16, 2, 18, 19), + }, + "attenuator-actual_transmission": 1, + "flux_flux_reading": 10, + "dcm-energy_in_kev": 11.105, + "eiger_bit_depth": "16", + }, + "timestamps": { + "det1": 1666604299.8220396, + "det2": 1666604299.8235943, + "eiger_bit_depth": 1666604299.8220396, + }, + "seq_num": 1, + "uid": "29033ecf-e052-43dd-98af-c7cdd62e8174", + "filled": {}, + } + test_event_document_zocalo_hardware: Event = { + "uid": "29033ecf-e052-43dd-98af-c7cdd62e8175", + "time": 1709654583.9770422, + "data": {"eiger_odin_file_writer_id": "test_path"}, + "timestamps": {"eiger_odin_file_writer_id": 1666604299.8220396}, + "seq_num": 1, + "filled": {}, + "descriptor": "f082901b-7453-4150-8ae5-c5f98bb34406", + } + test_stop_document: RunStop = { + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604300.0310638, + "uid": "65b2bde5-5740-42d7-9047-e860e06fbe15", + "exit_status": "success", + "reason": "", + "num_events": {"fake_ispyb_params": 1, "primary": 1}, + } + test_run_gridscan_stop_document: RunStop = { + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604300.0310638, + "uid": "65b2bde5-5740-42d7-9047-e860e06fbe15", + "exit_status": "success", + "reason": "", + "num_events": {"fake_ispyb_params": 1, "primary": 1}, + } + test_do_fgs_gridscan_stop_document: RunStop = { + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604300.0310638, + "uid": "65b2bde5-5740-42d7-9047-e860e06fbe15", + "exit_status": "success", + "reason": "", + "num_events": {"fake_ispyb_params": 1, "primary": 1}, + } + test_failed_stop_document: RunStop = { + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604300.0310638, + "uid": "65b2bde5-5740-42d7-9047-e860e06fbe15", + "exit_status": "fail", + "reason": "could not connect to devices", + "num_events": {"fake_ispyb_params": 1, "primary": 1}, + } + test_run_gridscan_failed_stop_document: RunStop = { + "run_start": 
"d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "time": 1666604300.0310638, + "uid": "65b2bde5-5740-42d7-9047-e860e06fbe15", + "exit_status": "fail", + "reason": "could not connect to devices", + "num_events": {"fake_ispyb_params": 1, "primary": 1}, + } + test_descriptor_document_zocalo_reading: EventDescriptor = { + "uid": "unique_id_zocalo_reading", + "run_start": "d8bee3ee-f614-4e7a-a516-25d6b9e87ef3", + "name": ZOCALO_READING_PLAN_NAME, + } # type:ignore + test_zocalo_reading_event: Event = { + "descriptor": "unique_id_zocalo_reading", + "data": {"zocalo-results": []}, + } # type:ignore diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/robot_load/test_robot_load_ispyb_callback.py b/tests/unit_tests/hyperion/external_interaction/callbacks/robot_load/test_robot_load_ispyb_callback.py new file mode 100644 index 000000000..cb77f8ec3 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/robot_load/test_robot_load_ispyb_callback.py @@ -0,0 +1,139 @@ +from unittest.mock import MagicMock, patch + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +import pytest +from bluesky.run_engine import RunEngine +from dodal.devices.oav.oav_detector import OAV +from dodal.devices.robot import BartRobot +from dodal.devices.webcam import Webcam +from ophyd_async.core import set_mock_value + +from mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback import ( + RobotLoadISPyBCallback, +) +from mx_bluesky.hyperion.parameters.constants import CONST + +VISIT_PATH = "/tmp/cm31105-4" + +SAMPLE_ID = 231412 +SAMPLE_PUCK = 50 +SAMPLE_PIN = 4 +ACTION_ID = 1098 + +metadata = { + "subplan_name": CONST.PLAN.ROBOT_LOAD, + "metadata": { + "visit_path": VISIT_PATH, + "sample_id": SAMPLE_ID, + "sample_puck": SAMPLE_PUCK, + "sample_pin": SAMPLE_PIN, + }, + "activate_callbacks": [ + "RobotLoadISPyBCallback", + ], +} + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.end_load" +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.start_load" +) +def test_given_start_doc_with_expected_data_then_data_put_in_ispyb( + start_load: MagicMock, + end_load: MagicMock, +): + RE = RunEngine() + RE.subscribe(RobotLoadISPyBCallback()) + start_load.return_value = ACTION_ID + + @bpp.run_decorator(md=metadata) + def my_plan(): + yield from bps.null() + + RE(my_plan()) + + start_load.assert_called_once_with("cm31105", 4, SAMPLE_ID, SAMPLE_PUCK, SAMPLE_PIN) + end_load.assert_called_once_with(ACTION_ID, "success", "OK") + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.end_load" +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.start_load" +) +def test_given_failing_plan_then_exception_detail( + start_load: MagicMock, + end_load: MagicMock, +): + RE = RunEngine() + RE.subscribe(RobotLoadISPyBCallback()) + start_load.return_value = ACTION_ID + + class _Exception(Exception): ... 
+ + @bpp.run_decorator(md=metadata) + def my_plan(): + raise _Exception("BAD") + yield from bps.null() + + with pytest.raises(_Exception): + RE(my_plan()) + + start_load.assert_called_once_with("cm31105", 4, SAMPLE_ID, SAMPLE_PUCK, SAMPLE_PIN) + end_load.assert_called_once_with(ACTION_ID, "fail", "BAD") + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.end_load" +) +def test_given_end_called_but_no_start_then_exception_raised(end_load): + callback = RobotLoadISPyBCallback() + callback.active = True + with pytest.raises(AssertionError): + callback.activity_gated_stop({"run_uid": None}) # type: ignore + end_load.assert_not_called() + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.end_load" +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.start_load" +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.robot_load.ispyb_callback.ExpeyeInteraction.update_barcode_and_snapshots" +) +def test_given_plan_reads_barcode_then_data_put_in_ispyb( + update_barcode_and_snapshots: MagicMock, + start_load: MagicMock, + end_load: MagicMock, + robot: BartRobot, + oav: OAV, + webcam: Webcam, +): + RE = RunEngine() + RE.subscribe(RobotLoadISPyBCallback()) + start_load.return_value = ACTION_ID + + oav.snapshot.last_saved_path.put("test_oav_snapshot") # type: ignore + set_mock_value(webcam.last_saved_path, "test_webcam_snapshot") + + @bpp.run_decorator(md=metadata) + def my_plan(): + yield from bps.create(name=CONST.DESCRIPTORS.ROBOT_LOAD) + yield from bps.read(robot.barcode) + yield from bps.read(oav.snapshot) + yield from bps.read(webcam) + yield from bps.save() + + RE(my_plan()) + + start_load.assert_called_once_with("cm31105", 4, SAMPLE_ID, SAMPLE_PUCK, SAMPLE_PIN) + update_barcode_and_snapshots.assert_called_once_with( + ACTION_ID, "BARCODE", "test_webcam_snapshot", "test_oav_snapshot" + ) + end_load.assert_called_once_with(ACTION_ID, "success", "OK") diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/__init__.py b/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/test_ispyb_callback.py b/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/test_ispyb_callback.py new file mode 100644 index 000000000..893ec75fd --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/test_ispyb_callback.py @@ -0,0 +1,331 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback import ( + RotationISPyBCallback, +) + +from ...conftest import ( + EXPECTED_END_TIME, + EXPECTED_START_TIME, + TEST_DATA_COLLECTION_GROUP_ID, + TEST_DATA_COLLECTION_IDS, + TEST_SAMPLE_ID, + TEST_SESSION_ID, + assert_upsert_call_with, + mx_acquisition_from_conn, +) +from ..conftest import TestData + +EXPECTED_DATA_COLLECTION = { + "visitid": TEST_SESSION_ID, + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "sampleid": TEST_SAMPLE_ID, + "detectorid": 78, + "axisstart": 0.0, + "axisrange": 0.1, + "axisend": 180, + "comments": "test", + "data_collection_number": 1, + "detector_distance": 100.0, + "exp_time": 0.1, + "imgdir": "/tmp/dls/i03/data/2024/cm31105-4/auto/123456/", + "imgprefix": "file_name", + "imgsuffix": "h5", + "n_passes": 1, + "overlap": 0, + 
"omegastart": 0, + "start_image_number": 1, + "xbeam": 150.0, + "ybeam": 160.0, + "synchrotron_mode": None, + "starttime": EXPECTED_START_TIME, + "filetemplate": "file_name_1_master.h5", + "nimages": 1800, + "kappastart": None, +} + + +@pytest.fixture +def rotation_start_outer_doc_without_snapshots( + test_rotation_start_outer_document, dummy_rotation_params +): + dummy_rotation_params.ispyb_extras.xtal_snapshots_omega_start = None + test_rotation_start_outer_document["hyperion_parameters"] = ( + dummy_rotation_params.json() + ) + return test_rotation_start_outer_document + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_activity_gated_start( + mock_ispyb_conn, rotation_start_outer_doc_without_snapshots +): + callback = RotationISPyBCallback() + + callback.activity_gated_start(rotation_start_outer_doc_without_snapshots) + mx = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx.upsert_data_collection_group.mock_calls[0], + mx.get_data_collection_group_params(), + { + "parentid": TEST_SESSION_ID, + "experimenttype": "SAD", + "sampleid": TEST_SAMPLE_ID, + }, + ) + assert_upsert_call_with( + mx.upsert_data_collection.mock_calls[0], + mx.get_data_collection_params(), + EXPECTED_DATA_COLLECTION, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_activity_gated_start_with_snapshot_parameters( + mock_ispyb_conn, test_rotation_start_outer_document +): + callback = RotationISPyBCallback() + + callback.activity_gated_start(test_rotation_start_outer_document) + mx = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx.upsert_data_collection_group.mock_calls[0], + mx.get_data_collection_group_params(), + { + "parentid": TEST_SESSION_ID, + "experimenttype": "SAD", + "sampleid": TEST_SAMPLE_ID, + }, + ) + assert_upsert_call_with( + mx.upsert_data_collection.mock_calls[0], + mx.get_data_collection_params(), + EXPECTED_DATA_COLLECTION + | { + "xtal_snapshot1": "test_1_y", + "xtal_snapshot2": "test_2_y", + "xtal_snapshot3": "test_3_y", + }, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_hardware_read_events( + mock_ispyb_conn, dummy_rotation_params, test_rotation_start_outer_document +): + callback = RotationISPyBCallback() + callback.activity_gated_start(test_rotation_start_outer_document) # pyright: ignore + callback.activity_gated_start( + TestData.test_rotation_start_main_document # pyright: ignore + ) + mx = mx_acquisition_from_conn(mock_ispyb_conn) + + mx.upsert_data_collection_group.reset_mock() + mx.upsert_data_collection.reset_mock() + + callback.activity_gated_descriptor( + TestData.test_descriptor_document_pre_data_collection + ) + callback.activity_gated_event(TestData.test_event_document_pre_data_collection) + mx.upsert_data_collection_group.assert_not_called() + assert_upsert_call_with( + mx.upsert_data_collection.mock_calls[0], + mx.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[0], + "slitgaphorizontal": 0.1234, + "slitgapvertical": 0.2345, + "synchrotronmode": "User", + "undulatorgap1": 1.234, + "comments": "Sample position (µm): (158, 24, 3) test ", + }, + ) + expected_data = 
TestData.test_event_document_pre_data_collection["data"] + assert_upsert_call_with( + mx.update_dc_position.mock_calls[0], + mx.get_dc_position_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "pos_x": expected_data["smargon-x"], + "pos_y": expected_data["smargon-y"], + "pos_z": expected_data["smargon-z"], + }, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_flux_read_events( + mock_ispyb_conn, dummy_rotation_params, test_rotation_start_outer_document +): + callback = RotationISPyBCallback() + callback.activity_gated_start(test_rotation_start_outer_document) # pyright: ignore + callback.activity_gated_start( + TestData.test_rotation_start_main_document # pyright: ignore + ) + mx = mx_acquisition_from_conn(mock_ispyb_conn) + callback.activity_gated_descriptor( + TestData.test_descriptor_document_pre_data_collection + ) + callback.activity_gated_event(TestData.test_event_document_pre_data_collection) + mx.upsert_data_collection_group.reset_mock() + mx.upsert_data_collection.reset_mock() + callback.activity_gated_descriptor( + TestData.test_descriptor_document_during_data_collection + ) + callback.activity_gated_event( + TestData.test_rotation_event_document_during_data_collection + ) + + mx.upsert_data_collection_group.assert_not_called() + assert_upsert_call_with( + mx.upsert_data_collection.mock_calls[0], + mx.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[0], + "focal_spot_size_at_samplex": 0.05, + "focal_spot_size_at_sampley": 0.02, + "beamsize_at_samplex": 0.05, + "beamsize_at_sampley": 0.02, + "wavelength": 1.1164718451643736, + "transmission": 98, + "flux": 9.81, + "resolution": 1.1830593328548429, + }, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_oav_rotation_snapshot_triggered_event( + mock_ispyb_conn, dummy_rotation_params, rotation_start_outer_doc_without_snapshots +): + callback = RotationISPyBCallback() + callback.activity_gated_start(rotation_start_outer_doc_without_snapshots) # pyright: ignore + callback.activity_gated_start( + TestData.test_rotation_start_main_document # pyright: ignore + ) + mx = mx_acquisition_from_conn(mock_ispyb_conn) + callback.activity_gated_descriptor( + TestData.test_descriptor_document_oav_rotation_snapshot + ) + + for snapshot in [ + {"filename": "snapshot_0", "colname": "xtal_snapshot1"}, + {"filename": "snapshot_90", "colname": "xtal_snapshot2"}, + {"filename": "snapshot_180", "colname": "xtal_snapshot3"}, + {"filename": "snapshot_270", "colname": "xtal_snapshot4"}, + ]: + mx.upsert_data_collection.reset_mock() + event_doc = dict(TestData.test_event_document_oav_rotation_snapshot) + event_doc["data"]["oav_snapshot_last_saved_path"] = snapshot["filename"] # type: ignore + callback.activity_gated_event( + TestData.test_event_document_oav_rotation_snapshot + ) + mx.upsert_data_collection_group.reset_mock() + assert_upsert_call_with( + mx.upsert_data_collection.mock_calls[0], + mx.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[0], + snapshot["colname"]: snapshot["filename"], + }, + ) + + mx.upsert_data_collection_group.assert_not_called() + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + 
new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_activity_gated_stop(mock_ispyb_conn, test_rotation_start_outer_document): + callback = RotationISPyBCallback() + callback.activity_gated_start(test_rotation_start_outer_document) # pyright: ignore + callback.activity_gated_start( + TestData.test_rotation_start_main_document # pyright: ignore + ) + mx = mx_acquisition_from_conn(mock_ispyb_conn) + + mx.upsert_data_collection_group.reset_mock() + mx.upsert_data_collection.reset_mock() + + with patch( + "mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store.get_current_time_string", + new=MagicMock(return_value=EXPECTED_END_TIME), + ): + callback.activity_gated_stop(TestData.test_rotation_stop_main_document) + + assert mx.update_data_collection_append_comments.call_args_list[0] == ( + ( + TEST_DATA_COLLECTION_IDS[0], + "DataCollection Successful reason: Test succeeded", + " ", + ), + ) + assert_upsert_call_with( + mx.upsert_data_collection.mock_calls[0], + mx.get_data_collection_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "endtime": EXPECTED_END_TIME, + "runstatus": "DataCollection Successful", + }, + ) + assert len(mx.upsert_data_collection.mock_calls) == 1 + + +def test_comment_correct_after_hardware_read( + mock_ispyb_conn, dummy_rotation_params, test_rotation_start_outer_document +): + callback = RotationISPyBCallback() + test_rotation_start_outer_document["hyperion_parameters"] = ( + test_rotation_start_outer_document[ + "hyperion_parameters" + ].replace('"comment": "test"', '"comment": "a lovely unit test"') + ) + callback.activity_gated_start(test_rotation_start_outer_document) # pyright: ignore + callback.activity_gated_start( + TestData.test_rotation_start_main_document # pyright: ignore + ) + mx = mx_acquisition_from_conn(mock_ispyb_conn) + + mx.upsert_data_collection_group.reset_mock() + mx.upsert_data_collection.reset_mock() + + callback.activity_gated_descriptor( + TestData.test_descriptor_document_pre_data_collection + ) + callback.activity_gated_event(TestData.test_event_document_pre_data_collection) + assert_upsert_call_with( + mx.upsert_data_collection.mock_calls[0], + mx.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[0], + "slitgaphorizontal": 0.1234, + "slitgapvertical": 0.2345, + "synchrotronmode": "User", + "undulatorgap1": 1.234, + "comments": "Sample position (µm): (158, 24, 3) a lovely unit test ", + }, + ) diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/test_ispyb_mapping.py b/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/test_ispyb_mapping.py new file mode 100644 index 000000000..dbf73a2c7 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/rotation/test_ispyb_mapping.py @@ -0,0 +1,16 @@ +from unittest.mock import patch + +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_mapping import ( + populate_data_collection_info_for_rotation, +) + + +def test_populate_data_collection_info_for_rotation_checks_snapshots( + dummy_rotation_params, +): + with patch( + "mx_bluesky.hyperion.log.ISPYB_LOGGER.warning", autospec=True + ) as warning: + dummy_rotation_params.ispyb_extras.xtal_snapshots_omega_start = None + populate_data_collection_info_for_rotation(dummy_rotation_params) + warning.assert_called_once_with("No xtal snapshot paths sent to ISPyB!") diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/test_external_callbacks.py 
b/tests/unit_tests/hyperion/external_interaction/callbacks/test_external_callbacks.py new file mode 100644 index 000000000..4cea69a23 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/test_external_callbacks.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +from collections.abc import Callable +from unittest.mock import MagicMock, patch + +import pytest +from bluesky.callbacks.zmq import Proxy, RemoteDispatcher +from dodal.log import LOGGER as DODAL_LOGGER + +from mx_bluesky.hyperion.external_interaction.callbacks.__main__ import ( + main, + setup_callbacks, + setup_logging, + setup_threads, +) +from mx_bluesky.hyperion.log import ISPYB_LOGGER, NEXUS_LOGGER + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.__main__.parse_callback_dev_mode_arg", + return_value=("DEBUG", True), +) +@patch("mx_bluesky.hyperion.external_interaction.callbacks.__main__.setup_callbacks") +@patch("mx_bluesky.hyperion.external_interaction.callbacks.__main__.setup_logging") +@patch("mx_bluesky.hyperion.external_interaction.callbacks.__main__.setup_threads") +def test_main_function( + setup_threads: MagicMock, + setup_logging: MagicMock, + setup_callbacks: MagicMock, + parse_callback_dev_mode_arg: MagicMock, +): + setup_threads.return_value = (MagicMock(), MagicMock(), MagicMock(), MagicMock()) + + main() + setup_threads.assert_called() + setup_logging.assert_called() + setup_callbacks.assert_called() + + +def test_setup_callbacks(): + current_number_of_callbacks = 6 + cbs = setup_callbacks() + assert len(cbs) == current_number_of_callbacks + assert len(set(cbs)) == current_number_of_callbacks + + +@pytest.mark.skip_log_setup +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.__main__.parse_callback_dev_mode_arg", + return_value=True, +) +def test_setup_logging(parse_callback_cli_args): + assert DODAL_LOGGER.parent != ISPYB_LOGGER + assert len(ISPYB_LOGGER.handlers) == 0 + assert len(NEXUS_LOGGER.handlers) == 0 + setup_logging(parse_callback_cli_args()) + assert len(ISPYB_LOGGER.handlers) == 4 + assert len(NEXUS_LOGGER.handlers) == 4 + assert DODAL_LOGGER.parent == ISPYB_LOGGER + setup_logging(parse_callback_cli_args()) + assert len(ISPYB_LOGGER.handlers) == 4 + assert len(NEXUS_LOGGER.handlers) == 4 + + +@patch("zmq.Context") +def test_setup_threads(_): + proxy, dispatcher, start_proxy, start_dispatcher = setup_threads() + assert isinstance(proxy, Proxy) + assert isinstance(dispatcher, RemoteDispatcher) + assert isinstance(start_proxy, Callable) + assert isinstance(start_dispatcher, Callable) diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/test_plan_reactive_callback.py b/tests/unit_tests/hyperion/external_interaction/callbacks/test_plan_reactive_callback.py new file mode 100644 index 000000000..f26715656 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/test_plan_reactive_callback.py @@ -0,0 +1,220 @@ +from unittest.mock import DEFAULT, MagicMock, patch + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +import pytest +from bluesky.run_engine import RunEngine +from event_model.documents import Event, EventDescriptor, RunStart, RunStop + +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) + +from ..conftest import MockReactiveCallback, get_test_plan + + +def test_activity_gated_functions_not_called_when_inactive( + mocked_test_callback: MockReactiveCallback, +): + mocked_test_callback.start({}) # type: ignore + 
mocked_test_callback.activity_gated_start.assert_not_called() # type: ignore + mocked_test_callback.descriptor({}) # type: ignore + mocked_test_callback.activity_gated_descriptor.assert_not_called() # type: ignore + mocked_test_callback.event({}) # type: ignore + mocked_test_callback.activity_gated_event.assert_not_called() # type: ignore + mocked_test_callback.stop({}) # type: ignore + mocked_test_callback.activity_gated_stop.assert_not_called() # type: ignore + + +def test_activity_gated_functions_called_when_active( + mocked_test_callback: MockReactiveCallback, +): + mocked_test_callback.active = True + mocked_test_callback.start({}) # type: ignore + mocked_test_callback.activity_gated_start.assert_called_once() # type: ignore + mocked_test_callback.descriptor({}) # type: ignore + mocked_test_callback.activity_gated_descriptor.assert_called_once() # type: ignore + mocked_test_callback.event({}) # type: ignore + mocked_test_callback.activity_gated_event.assert_called_once() # type: ignore + mocked_test_callback.stop({}) # type: ignore + mocked_test_callback.activity_gated_stop.assert_called_once() # type: ignore + + +def test_activates_on_appropriate_start_doc(mocked_test_callback): + assert mocked_test_callback.active is False + mocked_test_callback.start({"activate_callbacks": ["MockReactiveCallback"]}) + assert mocked_test_callback.active is True + + +def test_deactivates_on_appropriate_stop_doc_uid(mocked_test_callback): + assert mocked_test_callback.active is False + mocked_test_callback.start( + {"activate_callbacks": ["MockReactiveCallback"], "uid": "foo"} + ) + assert mocked_test_callback.active is True + mocked_test_callback.stop({"run_start": "foo"}) + assert mocked_test_callback.active is False + + +def test_doesnt_deactivate_on_inappropriate_stop_doc_uid(mocked_test_callback): + assert mocked_test_callback.active is False + mocked_test_callback.start( + {"activate_callbacks": ["MockReactiveCallback"], "uid": "foo"} + ) + assert mocked_test_callback.active is True + mocked_test_callback.stop({"run_start": "bar"}) + assert mocked_test_callback.active is True + + +def test_activates_on_metadata( + RE_with_mock_callback: tuple[RunEngine, MockReactiveCallback], +): + RE, callback = RE_with_mock_callback + RE(get_test_plan("MockReactiveCallback")[0]()) + callback.activity_gated_start.assert_called_once() + callback.activity_gated_descriptor.assert_called_once() + callback.activity_gated_event.assert_called_once() + callback.activity_gated_stop.assert_called_once() + + +def test_deactivates_after_closing( + RE_with_mock_callback: tuple[RunEngine, MockReactiveCallback], +): + RE, callback = RE_with_mock_callback + assert callback.active is False + RE(get_test_plan("MockReactiveCallback")[0]()) + assert callback.active is False + + +def test_doesnt_activate_on_wrong_metadata( + RE_with_mock_callback: tuple[RunEngine, MockReactiveCallback], +): + RE, callback = RE_with_mock_callback + RE(get_test_plan("TestNotCallback")[0]()) + callback.activity_gated_start.assert_not_called() # type: ignore + callback.activity_gated_descriptor.assert_not_called() # type: ignore + callback.activity_gated_event.assert_not_called() # type: ignore + callback.activity_gated_stop.assert_not_called() # type: ignore + + +def test_cb_logs_and_raises_exception(): + cb = MockReactiveCallback() + cb.active = True + + class MockTestException(Exception): ... 
+ + e = MockTestException() + + def mock_excepting_func(_): + raise e + + cb.log = MagicMock() + + with pytest.raises(MockTestException): + cb._run_activity_gated("start", mock_excepting_func, {"start": "test"}) + + cb.log.exception.assert_called_with(e) + + +def test_emit_called_correctly(): + receiving_cb = MockReactiveCallback() + test_cb = PlanReactiveCallback(emit=receiving_cb, log=MagicMock()) + + start_doc: RunStart = {"uid": "123", "time": 0} + desc_doc: EventDescriptor = { + "data_keys": {}, + "run_start": "123", + "uid": "987", + "time": 0, + } + event_doc: Event = { + "data": {}, + "time": 0, + "descriptor": "987", + "timestamps": {}, + "uid": "999", + "seq_num": 0, + } + stop_doc: RunStop = { + "exit_status": "success", + "run_start": "123", + "uid": "456", + "time": 0, + } + + test_cb.active = True + receiving_cb.active = True + + test_cb.start(start_doc) + receiving_cb.activity_gated_start.assert_called_once_with(start_doc) + test_cb.descriptor(desc_doc) + receiving_cb.activity_gated_descriptor.assert_called_once_with(desc_doc) + test_cb.event(event_doc) + receiving_cb.activity_gated_event.assert_called_once_with(event_doc) + test_cb.stop(stop_doc) + receiving_cb.activity_gated_stop.assert_called_once_with(stop_doc) + + +class OuterCallback(PlanReactiveCallback): + pass + + +class InnerCallback(PlanReactiveCallback): + pass + + +def test_activate_callbacks_doesnt_deactivate_unlisted_callbacks(RE: RunEngine): + @bpp.set_run_key_decorator("inner_plan") + @bpp.run_decorator(md={"activate_callbacks": ["InnerCallback"]}) + def inner_plan(): + yield from bps.null() + + @bpp.set_run_key_decorator("outer_plan") + @bpp.run_decorator(md={"activate_callbacks": ["OuterCallback"]}) + def outer_plan(): + yield from inner_plan() + + outer_callback = OuterCallback(MagicMock()) + inner_callback = InnerCallback(MagicMock()) + + RE.subscribe(outer_callback) + RE.subscribe(inner_callback) + + with patch.multiple( + outer_callback, activity_gated_start=DEFAULT, activity_gated_stop=DEFAULT + ): + with patch.multiple( + inner_callback, activity_gated_start=DEFAULT, activity_gated_stop=DEFAULT + ): + root_mock = MagicMock() + # fmt: off + root_mock.attach_mock(outer_callback.activity_gated_start, "outer_start") # pyright: ignore + root_mock.attach_mock(outer_callback.activity_gated_stop, "outer_stop") # pyright: ignore + root_mock.attach_mock(inner_callback.activity_gated_start, "inner_start") # pyright: ignore + root_mock.attach_mock(inner_callback.activity_gated_stop, "inner_stop") # pyright: ignore + # fmt: on + RE(outer_plan()) + + assert [call[0] for call in root_mock.mock_calls] == [ + "outer_start", + "outer_start", + "inner_start", + "outer_stop", + "inner_stop", + "outer_stop", + ] + + assert ( + root_mock.mock_calls[0].args[0]["uid"] + != root_mock.mock_calls[1].args[0]["uid"] + ) + assert root_mock.mock_calls[1].args[0] == root_mock.mock_calls[2].args[0] + assert root_mock.mock_calls[3].args[0] == root_mock.mock_calls[4].args[0] + assert ( + root_mock.mock_calls[0].args[0]["uid"] + == root_mock.mock_calls[5].args[0]["run_start"] + ) + assert ( + root_mock.mock_calls[2].args[0]["uid"] + == root_mock.mock_calls[4].args[0]["run_start"] + ) diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/test_rotation_callbacks.py b/tests/unit_tests/hyperion/external_interaction/callbacks/test_rotation_callbacks.py new file mode 100644 index 000000000..7be099150 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/test_rotation_callbacks.py @@ -0,0 +1,420 @@ 
+import os +from collections.abc import Callable, Sequence +from unittest.mock import MagicMock, patch + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +import pytest +from bluesky.run_engine import RunEngine +from dodal.beamlines import i03 +from dodal.devices.attenuator import Attenuator +from dodal.devices.eiger import EigerDetector +from dodal.devices.flux import Flux +from event_model import RunStart +from ophyd.sim import make_fake_device +from ophyd_async.core import DeviceCollector, set_mock_value + +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, + read_hardware_for_zocalo, +) +from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import ( + create_rotation_callbacks, +) +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback import ( + RotationISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback import ( + RotationNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.exceptions import ISPyBDepositionNotMade +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.parameters.components import IspybExperimentType +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import RotationScan + +from .....conftest import raw_params_from_file + + +@pytest.fixture +def params(): + return RotationScan( + **raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + ) + + +@pytest.fixture +def test_outer_start_doc(params: RotationScan): + return { + "subplan_name": CONST.PLAN.ROTATION_OUTER, + "hyperion_parameters": params.json(), + } + + +@pytest.fixture +def test_main_start_doc(): + return { + "subplan_name": CONST.PLAN.ROTATION_MAIN, + "zocalo_environment": "dev_zocalo", + } + + +def activate_callbacks(cbs: tuple[RotationNexusFileCallback, RotationISPyBCallback]): + cbs[1].active = True + cbs[0].active = True + + +def fake_rotation_scan( + params: RotationScan, + subscriptions: ( + tuple[RotationNexusFileCallback, RotationISPyBCallback] + | Sequence[PlanReactiveCallback] + ), + after_open_do: Callable | None = None, + after_main_do: Callable | None = None, +): + with DeviceCollector(mock=True): + attenuator = Attenuator("", "attenuator") + flux = make_fake_device(Flux)(name="flux") + eiger = make_fake_device(EigerDetector)(name="eiger") + dcm = i03.dcm(fake_with_ophyd_sim=True) + ap_sg = i03.aperture_scatterguard(fake_with_ophyd_sim=True) + set_mock_value(dcm.energy_in_kev.user_readback, 12.1) + + @bpp.subs_decorator(list(subscriptions)) + @bpp.set_run_key_decorator("rotation_scan_with_cleanup_and_subs") + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.ROTATION_OUTER, + "hyperion_parameters": params.json(), + CONST.TRIGGER.ZOCALO: CONST.PLAN.ROTATION_MAIN, + "zocalo_environment": params.zocalo_environment, + } + ) + def plan(): + if after_open_do: + after_open_do(subscriptions) + + @bpp.set_run_key_decorator(CONST.PLAN.ROTATION_MAIN) + @bpp.run_decorator( + md={ + "subplan_name": CONST.PLAN.ROTATION_MAIN, + "zocalo_environment": "dev_zocalo", + "scan_points": 
[params.scan_points], + } + ) + def fake_main_plan(): + yield from read_hardware_during_collection( + ap_sg, attenuator, flux, dcm, eiger + ) + yield from read_hardware_for_zocalo(eiger) + if after_main_do: + after_main_do(subscriptions) + yield from bps.sleep(0) + + yield from fake_main_plan() + + return plan() + + +@pytest.fixture +def activated_mocked_cbs(): + nexus_callback, ispyb_callback = create_rotation_callbacks() + ispyb_callback.emit_cb = MagicMock + activate_callbacks((nexus_callback, ispyb_callback)) + nexus_callback.activity_gated_event = MagicMock(autospec=True) + nexus_callback.activity_gated_start = MagicMock(autospec=True) + return nexus_callback, ispyb_callback + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback.StoreInIspyb", + autospec=True, +) +def test_nexus_handler_gets_documents_in_mock_plan( + ispyb, + RE: RunEngine, + params: RotationScan, + activated_mocked_cbs: tuple[RotationNexusFileCallback, RotationISPyBCallback], +): + nexus_handler, _ = activated_mocked_cbs + RE(fake_rotation_scan(params, [nexus_handler])) + + assert nexus_handler.activity_gated_start.call_count == 2 # type: ignore + call_content_outer = nexus_handler.activity_gated_start.call_args_list[0].args[0] # type: ignore + assert call_content_outer["hyperion_parameters"] == params.json() + call_content_inner = nexus_handler.activity_gated_start.call_args_list[1].args[0] # type: ignore + assert call_content_inner["subplan_name"] == CONST.PLAN.ROTATION_MAIN + + assert nexus_handler.activity_gated_event.call_count == 2 # type: ignore + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback.NexusWriter", + autospec=True, +) +def test_nexus_handler_only_writes_once( + nexus_writer: MagicMock, + RE: RunEngine, + params: RotationScan, + test_outer_start_doc, +): + nexus_writer.return_value.data_filename = "test_full_filename" + cb = RotationNexusFileCallback() + cb.active = True + RE(fake_rotation_scan(params, [cb])) + nexus_writer.assert_called_once() + assert cb.writer is not None + cb.writer.create_nexus_file.assert_called_once() # type: ignore + + +def test_nexus_handler_triggers_write_file_when_told( + RE: RunEngine, params: RotationScan +): + pattern = f"{params.storage_directory}{params.file_name}_{params.detector_params.run_number}" + files = [f"{pattern}.nxs", f"{pattern}_master.h5"] + + def do_files(do_assert=False): + for file in files: + if do_assert: + assert os.path.isfile(file) + if os.path.isfile(file): + os.remove(file) + + do_files() + cb = RotationNexusFileCallback() + cb.active = True + RE(fake_rotation_scan(params, [cb])) + do_files(do_assert=True) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback.NexusWriter", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback.StoreInIspyb", + autospec=True, +) +def test_zocalo_start_and_end_not_triggered_if_ispyb_ids_not_present( + ispyb_store, + zocalo_trigger, + nexus_writer, + RE: RunEngine, + params: RotationScan, + test_outer_start_doc, +): + nexus_writer.return_value.data_filename = "test_full_filename" + nexus_callback, ispyb_callback = create_rotation_callbacks() + activate_callbacks((nexus_callback, ispyb_callback)) + + ispyb_callback.ispyb = MagicMock(spec=StoreInIspyb) + ispyb_callback.params = params + with pytest.raises(ISPyBDepositionNotMade): + 
RE(fake_rotation_scan(params, (nexus_callback, ispyb_callback))) + ispyb_callback.emit_cb.zocalo_interactor.run_start.assert_not_called() # type: ignore + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback.NexusWriter", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + autospec=True, +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback.StoreInIspyb" +) +def test_ispyb_starts_on_opening_and_zocalo_on_main_so_ispyb_triggered_before_zocalo( + ispyb_store, + zocalo_trigger, + nexus_writer, + RE: RunEngine, + params: RotationScan, + test_outer_start_doc, + test_main_start_doc, +): + mock_store_in_ispyb_instance = MagicMock(spec=StoreInIspyb) + returned_ids = IspybIds(data_collection_group_id=0, data_collection_ids=(0,)) + mock_store_in_ispyb_instance.begin_deposition.return_value = returned_ids + mock_store_in_ispyb_instance.update_deposition.return_value = returned_ids + + ispyb_store.return_value = mock_store_in_ispyb_instance + nexus_writer.return_value.data_filename = "test_full_filename" + nexus_callback, ispyb_callback = create_rotation_callbacks() + activate_callbacks((nexus_callback, ispyb_callback)) + ispyb_callback.emit_cb.stop = MagicMock() # type: ignore + + def after_open_do( + callbacks: tuple[RotationNexusFileCallback, RotationISPyBCallback], + ): + ispyb_callback.ispyb.begin_deposition.assert_called_once() # pyright: ignore + ispyb_callback.ispyb.update_deposition.assert_not_called() # pyright: ignore + + def after_main_do( + callbacks: tuple[RotationNexusFileCallback, RotationISPyBCallback], + ): + ispyb_callback.ispyb.update_deposition.assert_called_once() # pyright: ignore + ispyb_callback.emit_cb.zocalo_interactor.run_start.assert_called_once() # type: ignore + + RE( + fake_rotation_scan( + params, (nexus_callback, ispyb_callback), after_open_do, after_main_do + ) + ) + + ispyb_callback.emit_cb.zocalo_interactor.run_start.assert_called_once() # type: ignore + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + autospec=True, +) +def test_ispyb_handler_grabs_uid_from_main_plan_and_not_first_start_doc( + zocalo, RE: RunEngine, params: RotationScan, test_outer_start_doc +): + (nexus_callback, ispyb_callback) = create_rotation_callbacks() + ispyb_callback.emit_cb = None + activate_callbacks((nexus_callback, ispyb_callback)) + nexus_callback.activity_gated_event = MagicMock(autospec=True) + nexus_callback.activity_gated_start = MagicMock(autospec=True) + ispyb_callback.activity_gated_start = MagicMock( + autospec=True, side_effect=ispyb_callback.activity_gated_start + ) + + def after_open_do( + callbacks: tuple[RotationNexusFileCallback, RotationISPyBCallback], + ): + ispyb_callback.activity_gated_start.assert_called_once() # type: ignore + assert ispyb_callback.uid_to_finalize_on is None + + def after_main_do( + callbacks: tuple[RotationNexusFileCallback, RotationISPyBCallback], + ): + ispyb_callback.ispyb_ids = IspybIds( + data_collection_ids=(0,), data_collection_group_id=0 + ) + assert ispyb_callback.activity_gated_start.call_count == 2 # type: ignore + assert ispyb_callback.uid_to_finalize_on is not None + + with patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback.StoreInIspyb", + autospec=True, + ): + RE( + fake_rotation_scan( + params, (nexus_callback, ispyb_callback), after_open_do, after_main_do + ) + ) + + +@patch( + 
"mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback.StoreInIspyb", + autospec=True, +) +def test_ispyb_reuses_dcgid_on_same_sampleID( + rotation_ispyb: MagicMock, + RE: RunEngine, + params: RotationScan, +): + ispyb_cb = RotationISPyBCallback() + ispyb_cb.active = True + ispyb_ids = IspybIds(data_collection_group_id=23, data_collection_ids=(45,)) + rotation_ispyb.return_value.begin_deposition.return_value = ispyb_ids + + test_cases = zip( + [123, 123, 123, 456, 123, 456, 456, 999, 789, 789, 789], + [False, True, True, False, False, False, True, False, False, True, True], + strict=False, + ) + + last_dcgid = None + + for sample_id, same_dcgid in test_cases: + params.sample_id = sample_id + + def after_open_do(callbacks: list[RotationISPyBCallback]): + assert callbacks[0].uid_to_finalize_on is None + + def after_main_do(callbacks: list[RotationISPyBCallback]): + assert callbacks[0].uid_to_finalize_on is not None + + RE(fake_rotation_scan(params, [ispyb_cb], after_open_do, after_main_do)) + + begin_deposition_scan_data: ScanDataInfo = ( + rotation_ispyb.return_value.begin_deposition.call_args.args[1][0] + ) + if same_dcgid: + assert begin_deposition_scan_data.data_collection_info.parent_id is not None + assert ( + begin_deposition_scan_data.data_collection_info.parent_id is last_dcgid + ) + else: + assert begin_deposition_scan_data.data_collection_info.parent_id is None + + last_dcgid = ispyb_cb.ispyb_ids.data_collection_group_id + + +n_images_store_id = [ + (123, False), + (3600, True), + (1800, True), + (150, False), + (500, True), + (201, True), + (1, False), + (2000, True), + (2000, True), + (2000, True), + (123, False), + (3600, True), + (1800, True), + (123, False), + (1800, True), +] + + +@pytest.mark.parametrize("n_images,store_id", n_images_store_id) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.rotation.ispyb_callback.StoreInIspyb", + new=MagicMock(), +) +def test_ispyb_handler_stores_sampleid_for_full_collection_not_screening( + n_images: int, + store_id: bool, + params: RotationScan, +): + cb = RotationISPyBCallback() + cb.active = True + + doc: RunStart = { + "time": 0, + "uid": "abc123", + } + + params.sample_id = 987678 + params.scan_width_deg = n_images / 10 + if n_images < 200: + params.ispyb_experiment_type = IspybExperimentType.CHARACTERIZATION + assert params.num_images == n_images + doc["subplan_name"] = CONST.PLAN.ROTATION_OUTER # type: ignore + doc["hyperion_parameters"] = params.json() # type: ignore + + cb.start(doc) + assert (cb.last_sample_id == 987678) is store_id diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/test_zocalo_handler.py b/tests/unit_tests/hyperion/external_interaction/callbacks/test_zocalo_handler.py new file mode 100644 index 000000000..a2a592404 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/test_zocalo_handler.py @@ -0,0 +1,142 @@ +from unittest.mock import MagicMock, call, patch + +import pytest +from dodal.devices.zocalo import ZocaloStartInfo + +from mx_bluesky.hyperion.external_interaction.callbacks.common.callback_util import ( + create_gridscan_callbacks, +) +from mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback import ( + ZocaloCallback, +) +from mx_bluesky.hyperion.external_interaction.exceptions import ISPyBDepositionNotMade +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.parameters.constants import CONST + +from .conftest import TestData + 
+EXPECTED_DCID = 100 +EXPECTED_RUN_START_MESSAGE = {"event": "start", "ispyb_dcid": EXPECTED_DCID} +EXPECTED_RUN_END_MESSAGE = { + "event": "end", + "ispyb_dcid": EXPECTED_DCID, + "ispyb_wait_for_runstatus": "1", +} + +td = TestData() + + +def start_dict(plan_name: str = "test_plan_name", env: str = "test_env"): + return {CONST.TRIGGER.ZOCALO: plan_name, "zocalo_environment": env} + + +class TestZocaloHandler: + def _setup_handler(self): + zocalo_handler = ZocaloCallback() + assert zocalo_handler.triggering_plan is None + zocalo_handler.start(start_dict()) # type: ignore + assert zocalo_handler.triggering_plan == "test_plan_name" + assert zocalo_handler.zocalo_interactor is not None + return zocalo_handler + + def test_handler_gets_plan_name_from_start_doc(self): + self._setup_handler() + + def test_handler_doesnt_trigger_on_wrong_plan(self): + zocalo_handler = self._setup_handler() + zocalo_handler.start(start_dict("_not_test_plan_name")) # type: ignore + + def test_handler_raises_on_right_plan_with_wrong_metadata(self): + zocalo_handler = self._setup_handler() + with pytest.raises(AssertionError): + zocalo_handler.start({"subplan_name": "test_plan_name"}) # type: ignore + + def test_handler_raises_on_right_plan_with_no_ispyb_ids(self): + zocalo_handler = self._setup_handler() + with pytest.raises(ISPyBDepositionNotMade): + zocalo_handler.start( + { + "subplan_name": "test_plan_name", + "zocalo_environment": "test_env", + "scan_points": [{"test": [1, 2, 3]}], + } # type: ignore + ) + + @patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + autospec=True, + ) + def test_handler_inits_zocalo_trigger_on_right_plan(self, zocalo_trigger): + zocalo_handler = self._setup_handler() + zocalo_handler.start( + { + "subplan_name": "test_plan_name", + "zocalo_environment": "test_env", + "ispyb_dcids": (135, 139), + "scan_points": [{"test": [1, 2, 3]}], + } # type: ignore + ) + assert zocalo_handler.zocalo_interactor is not None + + @patch( + "mx_bluesky.hyperion.external_interaction.callbacks.zocalo_callback.ZocaloTrigger", + autospec=True, + ) + @patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback.NexusWriter", + ) + @patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb", + ) + def test_execution_of_do_fgs_triggers_zocalo_calls( + self, ispyb_store: MagicMock, nexus_writer: MagicMock, zocalo_trigger + ): + dc_ids = (1, 2) + dcg_id = 4 + + mock_ids = IspybIds(data_collection_ids=dc_ids, data_collection_group_id=dcg_id) + ispyb_store.return_value.mock_add_spec(StoreInIspyb) + + _, ispyb_cb = create_gridscan_callbacks() + ispyb_cb.active = True + assert isinstance(zocalo_handler := ispyb_cb.emit_cb, ZocaloCallback) + zocalo_handler._reset_state() + zocalo_handler._reset_state = MagicMock() + + ispyb_store.return_value.begin_deposition.return_value = mock_ids + ispyb_store.return_value.update_deposition.return_value = mock_ids + + ispyb_cb.start(td.test_gridscan3d_start_document) # type: ignore + ispyb_cb.start(td.test_gridscan_outer_start_document) # type: ignore + ispyb_cb.start(td.test_do_fgs_start_document) # type: ignore + ispyb_cb.descriptor(td.test_descriptor_document_pre_data_collection) # type: ignore + ispyb_cb.event(td.test_event_document_pre_data_collection) + ispyb_cb.descriptor(td.test_descriptor_document_zocalo_hardware) + ispyb_cb.event(td.test_event_document_zocalo_hardware) + ispyb_cb.descriptor( + td.test_descriptor_document_during_data_collection # type: ignore 
+ ) + ispyb_cb.event(td.test_event_document_during_data_collection) + assert zocalo_handler.zocalo_interactor is not None + + expected_start_calls = [ + call(ZocaloStartInfo(1, "test_path", 0, 200, 0)), + call(ZocaloStartInfo(2, "test_path", 200, 300, 1)), + ] + + zocalo_handler.zocalo_interactor.run_start.assert_has_calls( # type: ignore + expected_start_calls + ) + assert zocalo_handler.zocalo_interactor.run_start.call_count == len(dc_ids) # type: ignore + + ispyb_cb.stop(td.test_stop_document) + + zocalo_handler.zocalo_interactor.run_end.assert_has_calls( # type: ignore + [call(x) for x in dc_ids] + ) + assert zocalo_handler.zocalo_interactor.run_end.call_count == len(dc_ids) # type: ignore + + zocalo_handler._reset_state.assert_called() diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/__init__.py b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/__init__.py new file mode 100644 index 000000000..43ce9b8e9 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/__init__.py @@ -0,0 +1,2 @@ +"""This is a module so that one can access the test data variables stored in +hyperion.external_interaction.tests.conftest.TestData""" diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/conftest.py b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/conftest.py new file mode 100644 index 000000000..7605c7395 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/conftest.py @@ -0,0 +1,60 @@ +from unittest.mock import patch + +import pytest + +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, +) + + +@pytest.fixture +def nexus_writer(): + with patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback.NexusWriter" + ) as nw: + yield nw + + +@pytest.fixture +def mock_ispyb_get_time(): + with patch( + "mx_bluesky.hyperion.external_interaction.ispyb.ispyb_utils.get_current_time_string" + ) as p: + yield p + + +@pytest.fixture +def mock_ispyb_store_grid_scan(): + with patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb" + ) as p: + yield p + + +@pytest.fixture +def mock_ispyb_update_time_and_status(): + with patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb._update_scan_with_end_time_and_status" + ) as p: + yield p + + +@pytest.fixture +def mock_ispyb_begin_deposition(): + with patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb.begin_deposition" + ) as p: + yield p + + +@pytest.fixture +def mock_ispyb_end_deposition(): + with patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb.end_deposition" + ) as p: + yield p + + +@pytest.fixture +def ispyb_handler(): + return GridscanISPyBCallback() diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_callback.py b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_callback.py new file mode 100644 index 000000000..845b5cf44 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_callback.py @@ -0,0 +1,252 @@ +from unittest.mock import MagicMock, patch + +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, +) + +from ...conftest import ( + 
EXPECTED_START_TIME, + TEST_DATA_COLLECTION_GROUP_ID, + TEST_DATA_COLLECTION_IDS, + TEST_SAMPLE_ID, + TEST_SESSION_ID, + assert_upsert_call_with, + mx_acquisition_from_conn, +) +from ..conftest import TestData + +EXPECTED_DATA_COLLECTION_3D_XY = { + "visitid": TEST_SESSION_ID, + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "sampleid": TEST_SAMPLE_ID, + "detectorid": 78, + "data_collection_number": 1, + "detector_distance": 100.0, + "exp_time": 0.1, + "imgdir": "/tmp/", + "imgprefix": "file_name", + "imgsuffix": "h5", + "n_passes": 1, + "overlap": 0, + "start_image_number": 1, + "wavelength": None, + "xbeam": 150.0, + "ybeam": 160.0, + "synchrotron_mode": None, + "undulator_gap1": None, + "starttime": EXPECTED_START_TIME, + "filetemplate": "file_name_1_master.h5", +} + +EXPECTED_DATA_COLLECTION_3D_XZ = EXPECTED_DATA_COLLECTION_3D_XY | { + "data_collection_number": 2, + "filetemplate": "file_name_2_master.h5", +} + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +class TestXrayCentreISPyBCallback: + def test_activity_gated_start_3d(self, mock_ispyb_conn): + callback = GridscanISPyBCallback() + callback.activity_gated_start(TestData.test_gridscan3d_start_document) # pyright: ignore + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx_acq.upsert_data_collection_group.mock_calls[0], # pyright: ignore + mx_acq.get_data_collection_group_params(), + { + "parentid": TEST_SESSION_ID, + "experimenttype": "Mesh3D", + "sampleid": TEST_SAMPLE_ID, + }, + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + EXPECTED_DATA_COLLECTION_3D_XY, + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[1], + mx_acq.get_data_collection_params(), + EXPECTED_DATA_COLLECTION_3D_XZ, + ) + mx_acq.upsert_data_collection.update_dc_position.assert_not_called() + mx_acq.upsert_data_collection.upsert_dc_grid.assert_not_called() + + def test_hardware_read_event_3d(self, mock_ispyb_conn): + callback = GridscanISPyBCallback() + callback.activity_gated_start(TestData.test_gridscan3d_start_document) # pyright: ignore + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + mx_acq.upsert_data_collection_group.reset_mock() + mx_acq.upsert_data_collection.reset_mock() + callback.activity_gated_descriptor( + TestData.test_descriptor_document_pre_data_collection + ) + callback.activity_gated_event(TestData.test_event_document_pre_data_collection) + mx_acq.upsert_data_collection_group.assert_not_called() + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[0], + "slitgaphorizontal": 0.1234, + "slitgapvertical": 0.2345, + "synchrotronmode": "User", + "undulatorgap1": 1.234, + }, + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[1], + mx_acq.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[1], + "slitgaphorizontal": 0.1234, + "slitgapvertical": 0.2345, + "synchrotronmode": "User", + "undulatorgap1": 1.234, + }, + ) + + def test_flux_read_events_3d(self, mock_ispyb_conn): + callback = GridscanISPyBCallback() + callback.activity_gated_start(TestData.test_gridscan3d_start_document) # pyright: ignore + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + callback.activity_gated_descriptor( + 
TestData.test_descriptor_document_pre_data_collection + ) + callback.activity_gated_event(TestData.test_event_document_pre_data_collection) + mx_acq.upsert_data_collection_group.reset_mock() + mx_acq.upsert_data_collection.reset_mock() + + callback.activity_gated_descriptor( + TestData.test_descriptor_document_during_data_collection + ) + callback.activity_gated_event( + TestData.test_event_document_during_data_collection + ) + + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[0], + "wavelength": 1.1164718451643736, + "transmission": 100, + "flux": 10, + "resolution": 1.1830593328548429, + "focal_spot_size_at_samplex": 0.05, + "focal_spot_size_at_sampley": 0.02, + "beamsize_at_samplex": 0.05, + "beamsize_at_sampley": 0.02, + }, + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[1], + mx_acq.get_data_collection_params(), + { + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "id": TEST_DATA_COLLECTION_IDS[1], + "wavelength": 1.1164718451643736, + "transmission": 100, + "flux": 10, + "resolution": 1.1830593328548429, + "focal_spot_size_at_samplex": 0.05, + "focal_spot_size_at_sampley": 0.02, + "beamsize_at_samplex": 0.05, + "beamsize_at_sampley": 0.02, + }, + ) + mx_acq.update_dc_position.assert_not_called() + mx_acq.upsert_dc_grid.assert_not_called() + + def test_activity_gated_event_oav_snapshot_triggered(self, mock_ispyb_conn): + callback = GridscanISPyBCallback() + callback.activity_gated_start(TestData.test_gridscan3d_start_document) # pyright: ignore + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + mx_acq.upsert_data_collection_group.reset_mock() + mx_acq.upsert_data_collection.reset_mock() + + callback.activity_gated_descriptor( + TestData.test_descriptor_document_oav_snapshot + ) + callback.activity_gated_event(TestData.test_event_document_oav_snapshot_xy) + callback.activity_gated_event(TestData.test_event_document_oav_snapshot_xz) + + mx_acq.upsert_data_collection_group.assert_not_called() + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "nimages": 40 * 20, + "xtal_snapshot1": "test_1_y", + "xtal_snapshot2": "test_2_y", + "xtal_snapshot3": "test_3_y", + "comments": "Hyperion: Xray centring - Diffraction grid scan of 40 by 20 " + "images in 100.0 um by 120.0 um steps. Top left (px): [50,100], " + "bottom right (px): [3250,1700].", + "axisstart": 0, + "omegastart": 0, + "axisend": 0, + "axisrange": 0, + }, + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[1], + mx_acq.get_data_collection_params(), + { + "id": TEST_DATA_COLLECTION_IDS[1], + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "nimages": 40 * 10, + "xtal_snapshot1": "test_1_z", + "xtal_snapshot2": "test_2_z", + "xtal_snapshot3": "test_3_z", + "comments": "Hyperion: Xray centring - Diffraction grid scan of 40 by 10 " + "images in 100.0 um by 120.0 um steps. 
Top left (px): [50,0], " + "bottom right (px): [3250,800].", + "axisstart": 90, + "omegastart": 90, + "axisend": 90, + "axisrange": 0, + }, + ) + assert_upsert_call_with( + mx_acq.upsert_dc_grid.mock_calls[0], + mx_acq.get_dc_grid_params(), + { + "parentid": TEST_DATA_COLLECTION_IDS[0], + "dxinmm": 0.1, + "dyinmm": 0.12, + "stepsx": 40, + "stepsy": 20, + "micronsperpixelx": 1.25, + "micronsperpixely": 1.5, + "snapshotoffsetxpixel": 50, + "snapshotoffsetypixel": 100, + "orientation": "horizontal", + "snaked": True, + }, + ) + assert_upsert_call_with( + mx_acq.upsert_dc_grid.mock_calls[1], + mx_acq.get_dc_grid_params(), + { + "parentid": TEST_DATA_COLLECTION_IDS[1], + "dxinmm": 0.1, + "dyinmm": 0.12, + "stepsx": 40, + "stepsy": 10, + "micronsperpixelx": 1.25, + "micronsperpixely": 1.5, + "snapshotoffsetxpixel": 50, + "snapshotoffsetypixel": 0, + "orientation": "horizontal", + "snaked": True, + }, + ) diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_handler.py b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_handler.py new file mode 100644 index 000000000..9bbdbc93b --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_handler.py @@ -0,0 +1,183 @@ +from unittest.mock import MagicMock, patch + +import pytest +from graypy import GELFTCPHandler + +from mx_bluesky.hyperion.external_interaction.callbacks.__main__ import setup_logging +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback import ( + GridscanISPyBCallback, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.log import ISPYB_LOGGER + +from ..conftest import TestData + +DC_IDS = (1, 2) +DCG_ID = 4 +DC_GRID_IDS = (11, 12) +td = TestData() + + +def mock_store_in_ispyb(config, *args, **kwargs) -> StoreInIspyb: + mock = MagicMock(spec=StoreInIspyb) + mock.end_deposition = MagicMock(return_value=None) + mock.begin_deposition = MagicMock( + return_value=IspybIds( + data_collection_group_id=DCG_ID, data_collection_ids=DC_IDS + ) + ) + mock.update_deposition = MagicMock( + return_value=IspybIds( + data_collection_group_id=DCG_ID, + data_collection_ids=DC_IDS, + grid_ids=DC_GRID_IDS, + ) + ) + mock.append_to_comment = MagicMock() + return mock + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + MagicMock(return_value=td.DUMMY_TIME_STRING), +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.StoreInIspyb", + mock_store_in_ispyb, +) +class TestXrayCentreIspybHandler: + def test_fgs_failing_results_in_bad_run_status_in_ispyb( + self, + ): + ispyb_handler = GridscanISPyBCallback() + ispyb_handler.activity_gated_start(td.test_gridscan3d_start_document) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_pre_data_collection + ) + ispyb_handler.activity_gated_event(td.test_event_document_pre_data_collection) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_during_data_collection + ) + ispyb_handler.activity_gated_event( + td.test_event_document_during_data_collection # pyright: ignore + ) + ispyb_handler.activity_gated_stop(td.test_run_gridscan_failed_stop_document) + + ispyb_handler.ispyb.end_deposition.assert_called_once_with( # type: ignore + IspybIds( + data_collection_group_id=DCG_ID, + data_collection_ids=DC_IDS, + grid_ids=DC_GRID_IDS, + ), + "fail", + "could not 
connect to devices", + ) + + def test_fgs_raising_no_exception_results_in_good_run_status_in_ispyb( + self, + ): + ispyb_handler = GridscanISPyBCallback() + ispyb_handler.activity_gated_start(td.test_gridscan3d_start_document) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_pre_data_collection + ) + ispyb_handler.activity_gated_event(td.test_event_document_pre_data_collection) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_during_data_collection + ) + ispyb_handler.activity_gated_event( + td.test_event_document_during_data_collection + ) + ispyb_handler.activity_gated_stop(td.test_do_fgs_gridscan_stop_document) + + ispyb_handler.ispyb.end_deposition.assert_called_once_with( # type: ignore + IspybIds( + data_collection_group_id=DCG_ID, + data_collection_ids=DC_IDS, + grid_ids=DC_GRID_IDS, + ), + "success", + "", + ) + + @pytest.mark.skip_log_setup + def test_given_ispyb_callback_started_writing_to_ispyb_when_messages_logged_then_they_contain_dcgid( + self, + ): + setup_logging(True) + gelf_handler: MagicMock = next( + filter(lambda h: isinstance(h, GELFTCPHandler), ISPYB_LOGGER.handlers) # type: ignore + ) + gelf_handler.emit = MagicMock() + + ispyb_handler = GridscanISPyBCallback() + ispyb_handler.activity_gated_start(td.test_gridscan3d_start_document) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_pre_data_collection + ) + ispyb_handler.activity_gated_event(td.test_event_document_pre_data_collection) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_during_data_collection + ) + ispyb_handler.activity_gated_event( + td.test_event_document_during_data_collection + ) + + ISPYB_LOGGER.info("test") + latest_record = gelf_handler.emit.call_args.args[-1] + assert latest_record.dc_group_id == DCG_ID + + @pytest.mark.skip_log_setup + def test_given_ispyb_callback_finished_writing_to_ispyb_when_messages_logged_then_they_do_not_contain_dcgid( + self, + ): + setup_logging(True) + gelf_handler: MagicMock = next( + filter(lambda h: isinstance(h, GELFTCPHandler), ISPYB_LOGGER.handlers) # type: ignore + ) + gelf_handler.emit = MagicMock() + + ispyb_handler = GridscanISPyBCallback() + ispyb_handler.activity_gated_start(td.test_gridscan3d_start_document) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_pre_data_collection + ) + ispyb_handler.activity_gated_event(td.test_event_document_pre_data_collection) + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_during_data_collection + ) + ispyb_handler.activity_gated_event( + td.test_event_document_during_data_collection + ) + ispyb_handler.activity_gated_stop(td.test_run_gridscan_failed_stop_document) + + ISPYB_LOGGER.info("test") + latest_record = gelf_handler.emit.call_args.args[-1] + assert not hasattr(latest_record, "dc_group_id") + + @patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_callback.time", + side_effect=[2, 100], + ) + def test_given_fgs_plan_finished_when_zocalo_results_event_then_expected_comment_deposited( + self, mock_time + ): + ispyb_handler = GridscanISPyBCallback() + + ispyb_handler.activity_gated_start(td.test_gridscan3d_start_document) # type:ignore + + ispyb_handler.activity_gated_start(td.test_do_fgs_start_document) # type:ignore + ispyb_handler.activity_gated_stop(td.test_do_fgs_gridscan_stop_document) + + ispyb_handler.activity_gated_descriptor( + td.test_descriptor_document_zocalo_reading + ) + ispyb_handler.activity_gated_event(td.test_zocalo_reading_event) 
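+ # time() is patched to return 2 and then 100, so the comment deposited below should report 98.00 s of Zocalo processing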
+ + assert ( + ispyb_handler.ispyb.append_to_comment.call_args.args[1] # type:ignore + == "Zocalo processing took 98.00 s. Zocalo found no crystals in this gridscan." + ) diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_mapping.py b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_mapping.py new file mode 100644 index 000000000..9aef93d41 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_ispyb_mapping.py @@ -0,0 +1,85 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_mapping import ( + construct_comment_for_gridscan, +) +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGridInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import Orientation +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan + +from ...conftest import ( + TEST_SAMPLE_ID, + default_raw_params, +) + + +@pytest.fixture +def dummy_params(): + dummy_params = ThreeDGridScan(**default_raw_params()) + dummy_params.sample_id = TEST_SAMPLE_ID + dummy_params.run_number = 0 + return dummy_params + + +@patch("ispyb.open", autospec=True) +def test_ispyb_deposition_rounds_position_to_int( + mock_ispyb_conn: MagicMock, + dummy_params, +): + assert ( + construct_comment_for_gridscan( + DataCollectionGridInfo( + 0.1, + 0.1, + 40, + 20, + 1.25, + 1.25, + 0.01, # type: ignore + 100, + Orientation.HORIZONTAL, + True, # type: ignore + ), + ) + == ( + "Hyperion: Xray centring - Diffraction grid scan of 40 by 20 images " + "in 100.0 um by 100.0 um steps. Top left (px): [0,100], bottom right (px): [3200,1700]." + ) + ) + + +@pytest.mark.parametrize( + ["raw", "rounded"], + [ + (0.0012345, "1.2"), + (0.020000000, "20.0"), + (0.01999999, "20.0"), + (0.015257, "15.3"), + (0.0001234, "0.1"), + (0.0017345, "1.7"), + (0.0019945, "2.0"), + ], +) +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.ispyb_mapping.oav_utils.bottom_right_from_top_left", + autospec=True, +) +def test_ispyb_deposition_rounds_box_size_int( + bottom_right_from_top_left: MagicMock, + dummy_params: ThreeDGridScan, + raw, + rounded, +): + data_collection_grid_info = DataCollectionGridInfo( + raw, raw, 0, 0, 1.25, 1.25, 0, 0, Orientation.HORIZONTAL, True + ) + bottom_right_from_top_left.return_value = [0, 0] + + assert construct_comment_for_gridscan(data_collection_grid_info) == ( + "Hyperion: Xray centring - Diffraction grid scan of 0 by 0 images in " + f"{rounded} um by {rounded} um steps. Top left (px): [0,0], bottom right (px): [0,0]." 
+    )
diff --git a/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_nexus_handler.py b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_nexus_handler.py
new file mode 100644
index 000000000..49719af0a
--- /dev/null
+++ b/tests/unit_tests/hyperion/external_interaction/callbacks/xray_centre/test_nexus_handler.py
@@ -0,0 +1,132 @@
+from copy import deepcopy
+from unittest.mock import MagicMock, patch
+
+import numpy as np
+import pytest
+from numpy.typing import DTypeLike
+
+from mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback import (
+    GridscanNexusFileCallback,
+)
+
+from ..conftest import TestData
+
+
+@pytest.fixture
+def nexus_writer():
+    with patch(
+        "mx_bluesky.hyperion.external_interaction.nexus.write_nexus.NexusWriter"
+    ) as nw:
+        yield nw
+
+
+def test_writers_not_setup_on_plan_start_doc(
+    nexus_writer: MagicMock,
+):
+    nexus_handler = GridscanNexusFileCallback()
+    nexus_writer.assert_not_called()
+    nexus_handler.activity_gated_start(TestData.test_start_document)
+    nexus_writer.assert_not_called()
+
+
+@patch(
+    "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback.NexusWriter"
+)
+def test_writers_dont_create_on_init_but_do_on_during_collection_read_event(
+    mock_nexus_writer: MagicMock,
+):
+    mock_nexus_writer.side_effect = [MagicMock(), MagicMock()]
+    nexus_handler = GridscanNexusFileCallback()
+
+    assert nexus_handler.nexus_writer_1 is None
+    assert nexus_handler.nexus_writer_2 is None
+
+    nexus_handler.activity_gated_start(TestData.test_gridscan_outer_start_document)  # type: ignore
+    nexus_handler.activity_gated_descriptor(
+        TestData.test_descriptor_document_during_data_collection
+    )
+
+    nexus_handler.activity_gated_event(
+        TestData.test_event_document_during_data_collection
+    )
+
+    assert nexus_handler.nexus_writer_1 is not None
+    assert nexus_handler.nexus_writer_2 is not None
+    nexus_handler.nexus_writer_1.create_nexus_file.assert_called_once()
+    nexus_handler.nexus_writer_2.create_nexus_file.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    ["bit_depth", "vds_type"],
+    [
+        (8, np.uint8),
+        (16, np.uint16),
+        (32, np.uint32),
+    ],
+)
+@patch(
+    "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback.NexusWriter"
+)
+def test_given_different_bit_depths_then_writers_created_with_correct_VDS_size(
+    mock_nexus_writer: MagicMock,
+    bit_depth: int,
+    vds_type: DTypeLike,
+):
+    mock_nexus_writer.side_effect = [MagicMock(), MagicMock()]
+    nexus_handler = GridscanNexusFileCallback()
+
+    nexus_handler.activity_gated_start(TestData.test_start_document)
+    nexus_handler.activity_gated_descriptor(
+        TestData.test_descriptor_document_during_data_collection
+    )
+    event_doc = deepcopy(TestData.test_event_document_during_data_collection)
+    event_doc["data"]["eiger_bit_depth"] = bit_depth
+
+    nexus_handler.activity_gated_event(event_doc)
+
+    assert nexus_handler.nexus_writer_1 is not None
+    assert nexus_handler.nexus_writer_2 is not None
+    nexus_handler.nexus_writer_1.create_nexus_file.assert_called_once_with(  # type:ignore
+        vds_type
+    )
+    nexus_handler.nexus_writer_2.create_nexus_file.assert_called_once_with(  # type:ignore
+        vds_type
+    )
+
+
+@patch(
+    "mx_bluesky.hyperion.external_interaction.callbacks.xray_centre.nexus_callback.NexusWriter"
+)
+def test_beam_and_attenuator_set_on_ispyb_transmission_event(
+    mock_nexus_writer: MagicMock,
+):
+    mock_nexus_writer.side_effect = [MagicMock(), MagicMock()]
+    nexus_handler = GridscanNexusFileCallback()
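+    # Replaying the start, descriptor and event documents below should populate beam and attenuator on both nexus writers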
+ + nexus_handler.activity_gated_start(TestData.test_start_document) + nexus_handler.activity_gated_descriptor( + TestData.test_descriptor_document_during_data_collection + ) + nexus_handler.activity_gated_event( + TestData.test_event_document_during_data_collection + ) + + for writer in [nexus_handler.nexus_writer_1, nexus_handler.nexus_writer_2]: + assert writer is not None + assert writer.attenuator is not None + assert writer.beam is not None + + +def test_sensible_error_if_writing_triggered_before_params_received( + nexus_writer: MagicMock, +): + nexus_handler = GridscanNexusFileCallback() + with pytest.raises(AssertionError) as excinfo: + nexus_handler.activity_gated_descriptor( + TestData.test_descriptor_document_during_data_collection + ) + nexus_handler.activity_gated_event( + TestData.test_event_document_during_data_collection + ) + + assert "Nexus callback did not receive start doc" in excinfo.value.args[0] diff --git a/tests/unit_tests/hyperion/external_interaction/conftest.py b/tests/unit_tests/hyperion/external_interaction/conftest.py new file mode 100644 index 000000000..f5f2fd686 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/conftest.py @@ -0,0 +1,227 @@ +import glob +import os +from collections.abc import Callable, Sequence +from copy import deepcopy +from typing import Any +from unittest.mock import MagicMock, mock_open, patch + +import bluesky.plan_stubs as bps +import bluesky.preprocessors as bpp +import pytest +from bluesky.run_engine import RunEngine +from ispyb.sp.mxacquisition import MXAcquisition +from ophyd.sim import SynAxis + +from mx_bluesky.hyperion.external_interaction.callbacks.plan_reactive_callback import ( + PlanReactiveCallback, +) +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan +from mx_bluesky.hyperion.parameters.rotation import RotationScan +from mx_bluesky.hyperion.utils.utils import convert_angstrom_to_eV + +from ....conftest import raw_params_from_file + + +class MockReactiveCallback(PlanReactiveCallback): + activity_gated_start: MagicMock + activity_gated_descriptor: MagicMock + activity_gated_event: MagicMock + activity_gated_stop: MagicMock + + def __init__(self, *, emit: Callable[..., Any] | None = None) -> None: + super().__init__(MagicMock(), emit=emit) + self.activity_gated_start = MagicMock(name="activity_gated_start") # type: ignore + self.activity_gated_descriptor = MagicMock(name="activity_gated_descriptor") # type: ignore + self.activity_gated_event = MagicMock(name="activity_gated_event") # type: ignore + self.activity_gated_stop = MagicMock(name="activity_gated_stop") # type: ignore + + +@pytest.fixture +def mocked_test_callback(): + t = MockReactiveCallback() + return t + + +@pytest.fixture +def RE_with_mock_callback(mocked_test_callback): + RE = RunEngine() + RE.subscribe(mocked_test_callback) + yield RE, mocked_test_callback + + +def get_test_plan(callback_name): + s = SynAxis(name="fake_signal") + + @bpp.run_decorator(md={"activate_callbacks": [callback_name]}) + def test_plan(): + yield from bps.create() + yield from bps.read(s) + yield from bps.save() + + return test_plan, s + + +@pytest.fixture +def test_rotation_params(): + param_dict = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + param_dict["storage_directory"] = "tests/test_data" + param_dict["file_name"] = "TEST_FILENAME" + param_dict["demand_energy_ev"] = 12700 + param_dict["scan_width_deg"] = 360.0 + params = RotationScan(**param_dict) + params.x_start_um = 0 + 
params.y_start_um = 0 + params.z_start_um = 0 + params.exposure_time_s = 0.004 + return params + + +@pytest.fixture(params=[1050]) +def test_fgs_params(request): + assert request.param % 25 == 0, "Please use a multiple of 25 images" + params = ThreeDGridScan(**default_raw_params()) + params.demand_energy_ev = convert_angstrom_to_eV(1.0) + params.use_roi_mode = True + first_scan_img = (request.param // 10) * 6 + second_scan_img = (request.param // 10) * 4 + params.x_steps = 5 + params.y_steps = first_scan_img // 5 + params.z_steps = second_scan_img // 5 + params.storage_directory = ( + os.path.dirname(os.path.realpath(__file__)) + "/test_data" + ) + params.file_name = "dummy" + yield params + + +def _mock_ispyb_conn(base_ispyb_conn, position_id, dcgid, dcids, giids): + def upsert_data_collection(values): + kvpairs = remap_upsert_columns( + list(MXAcquisition.get_data_collection_params()), values + ) + if kvpairs["id"]: + return kvpairs["id"] + else: + return next(upsert_data_collection.i) # pyright: ignore + + mx_acq = base_ispyb_conn.return_value.mx_acquisition + mx_acq.upsert_data_collection.side_effect = upsert_data_collection + mx_acq.update_dc_position.return_value = position_id + mx_acq.upsert_data_collection_group.return_value = dcgid + + def upsert_dc_grid(values): + kvpairs = remap_upsert_columns(list(MXAcquisition.get_dc_grid_params()), values) + if kvpairs["id"]: + return kvpairs["id"] + else: + return next(upsert_dc_grid.i) # pyright: ignore + + upsert_data_collection.i = iter(dcids) # pyright: ignore + upsert_dc_grid.i = iter(giids) # pyright: ignore + + mx_acq.upsert_dc_grid.side_effect = upsert_dc_grid + return base_ispyb_conn + + +@pytest.fixture +def mock_ispyb_conn(base_ispyb_conn): + return _mock_ispyb_conn( + base_ispyb_conn, + TEST_POSITION_ID, + TEST_DATA_COLLECTION_GROUP_ID, + TEST_DATA_COLLECTION_IDS, + TEST_GRID_INFO_IDS, + ) + + +@pytest.fixture +def mock_ispyb_conn_multiscan(base_ispyb_conn): + return _mock_ispyb_conn( + base_ispyb_conn, + TEST_POSITION_ID, + TEST_DATA_COLLECTION_GROUP_ID, + list(range(12, 24)), + list(range(56, 68)), + ) + + +def mx_acquisition_from_conn(mock_ispyb_conn) -> MagicMock: + return mock_ispyb_conn.return_value.__enter__.return_value.mx_acquisition + + +def assert_upsert_call_with(call, param_template, expected: dict): + actual = remap_upsert_columns(list(param_template), call.args[0]) + assert actual == dict(param_template | expected) + + +TEST_DATA_COLLECTION_IDS = (12, 13) +TEST_DATA_COLLECTION_GROUP_ID = 34 +TEST_GRID_INFO_IDS = (56, 57) +TEST_POSITION_ID = 78 +TEST_SESSION_ID = 90 +EXPECTED_START_TIME = "2024-02-08 14:03:59" +EXPECTED_END_TIME = "2024-02-08 14:04:01" + + +def remap_upsert_columns(keys: Sequence[str], values: list): + return dict(zip(keys, values, strict=False)) + + +@pytest.fixture +def base_ispyb_conn(): + with patch("ispyb.open", mock_open()) as ispyb_connection: + mock_mx_acquisition = MagicMock() + mock_mx_acquisition.get_data_collection_group_params.side_effect = ( + lambda: deepcopy(MXAcquisition.get_data_collection_group_params()) + ) + + mock_mx_acquisition.get_data_collection_params.side_effect = lambda: deepcopy( + MXAcquisition.get_data_collection_params() + ) + mock_mx_acquisition.get_dc_position_params.side_effect = lambda: deepcopy( + MXAcquisition.get_dc_position_params() + ) + mock_mx_acquisition.get_dc_grid_params.side_effect = lambda: deepcopy( + MXAcquisition.get_dc_grid_params() + ) + ispyb_connection.return_value.mx_acquisition = mock_mx_acquisition + mock_core = MagicMock() + + def 
mock_retrieve_visit(visit_str): + assert visit_str, "No visit id supplied" + return TEST_SESSION_ID + + mock_core.retrieve_visit_id.side_effect = mock_retrieve_visit + ispyb_connection.return_value.core = mock_core + yield ispyb_connection + + +@pytest.fixture +def dummy_rotation_params(): + dummy_params = RotationScan( + **default_raw_params( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + ) + dummy_params.sample_id = TEST_SAMPLE_ID + + def clear_files(): + files = glob.glob(f"{dummy_params.storage_directory}*") + for f in files: + os.remove(f) + + clear_files() + yield dummy_params + clear_files() + + +TEST_SAMPLE_ID = 364758 +TEST_BARCODE = "12345A" + + +def default_raw_params( + json_file="tests/test_data/parameter_json_files/good_test_parameters.json", +): + return raw_params_from_file(json_file) diff --git a/tests/unit_tests/hyperion/external_interaction/ispyb/__init__.py b/tests/unit_tests/hyperion/external_interaction/ispyb/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/external_interaction/ispyb/conftest.py b/tests/unit_tests/hyperion/external_interaction/ispyb/conftest.py new file mode 100644 index 000000000..28478d1f8 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/ispyb/conftest.py @@ -0,0 +1,74 @@ +from copy import deepcopy + +import pytest + +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGridInfo, + DataCollectionPositionInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import Orientation +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import StoreInIspyb +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan + +from ..conftest import ( + TEST_DATA_COLLECTION_GROUP_ID, + TEST_DATA_COLLECTION_IDS, + TEST_SAMPLE_ID, + default_raw_params, +) + + +@pytest.fixture +def dummy_params(): + dummy_params = ThreeDGridScan(**default_raw_params()) + dummy_params.sample_id = TEST_SAMPLE_ID + dummy_params.run_number = 0 + return dummy_params + + +@pytest.fixture +def dummy_3d_gridscan_ispyb(): + store_in_ispyb_3d = StoreInIspyb(CONST.SIM.ISPYB_CONFIG) + return store_in_ispyb_3d + + +@pytest.fixture +def dummy_rotation_ispyb(dummy_rotation_params): + store_in_ispyb = StoreInIspyb(CONST.SIM.ISPYB_CONFIG) + return store_in_ispyb + + +@pytest.fixture +def dummy_2d_gridscan_ispyb(): + return StoreInIspyb(CONST.SIM.ISPYB_CONFIG) + + +@pytest.fixture +def scan_xy_data_info_for_update( + scan_data_info_for_begin: ScanDataInfo, +) -> ScanDataInfo: + scan_data_info_for_update = deepcopy(scan_data_info_for_begin) + scan_data_info_for_update.data_collection_info.parent_id = ( + TEST_DATA_COLLECTION_GROUP_ID + ) + scan_data_info_for_update.data_collection_info.synchrotron_mode = "test" + scan_data_info_for_update.data_collection_info.flux = 10 + scan_data_info_for_update.data_collection_grid_info = DataCollectionGridInfo( + dx_in_mm=0.1, + dy_in_mm=0.1, + steps_x=40, + steps_y=20, + microns_per_pixel_x=1.25, + microns_per_pixel_y=1.25, + snapshot_offset_x_pixel=100, + snapshot_offset_y_pixel=100, + orientation=Orientation.HORIZONTAL, + snaked=True, + ) + scan_data_info_for_update.data_collection_position_info = ( + DataCollectionPositionInfo(0, 0, 0) + ) + scan_data_info_for_update.data_collection_id = TEST_DATA_COLLECTION_IDS[0] + return scan_data_info_for_update diff --git 
a/tests/unit_tests/hyperion/external_interaction/ispyb/test_expeye_interaction.py b/tests/unit_tests/hyperion/external_interaction/ispyb/test_expeye_interaction.py new file mode 100644 index 000000000..f16ac90f2 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/ispyb/test_expeye_interaction.py @@ -0,0 +1,139 @@ +from unittest.mock import ANY, patch + +import pytest + +from mx_bluesky.hyperion.external_interaction.exceptions import ISPyBDepositionNotMade +from mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store import ( + BearerAuth, + ExpeyeInteraction, + _get_base_url_and_token, +) + + +def test_get_url_and_token_returns_expected_data(): + url, token = _get_base_url_and_token() + assert url == "http://blah" + assert token == "notatoken" + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.post") +def test_when_start_load_called_then_correct_expected_url_posted_to_with_expected_data( + mock_post, +): + expeye_interactor = ExpeyeInteraction() + expeye_interactor.start_load("test", 3, 700, 10, 5) + + mock_post.assert_called_once() + assert ( + mock_post.call_args.args[0] + == "http://blah/core/proposals/test/sessions/3/robot-actions" + ) + expected_data = { + "startTimestamp": ANY, + "sampleId": 700, + "actionType": "LOAD", + "containerLocation": 5, + "dewarLocation": 10, + } + assert mock_post.call_args.kwargs["json"] == expected_data + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.post") +def test_when_start_called_then_returns_id(mock_post): + mock_post.return_value.json.return_value = {"robotActionId": 190} + expeye_interactor = ExpeyeInteraction() + robot_id = expeye_interactor.start_load("test", 3, 700, 10, 5) + assert robot_id == 190 + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.post") +def test_when_start_load_called_then_use_correct_token( + mock_post, +): + expeye_interactor = ExpeyeInteraction() + expeye_interactor.start_load("test", 3, 700, 10, 5) + + assert isinstance(auth := mock_post.call_args.kwargs["auth"], BearerAuth) + assert auth.token == "notatoken" + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.post") +def test_given_server_does_not_respond_when_start_load_called_then_error(mock_post): + mock_post.return_value.ok = False + + expeye_interactor = ExpeyeInteraction() + with pytest.raises(ISPyBDepositionNotMade): + expeye_interactor.start_load("test", 3, 700, 10, 5) + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.patch") +def test_when_end_load_called_with_success_then_correct_expected_url_posted_to_with_expected_data( + mock_patch, +): + expeye_interactor = ExpeyeInteraction() + expeye_interactor.end_load(3, "success", "") + + mock_patch.assert_called_once() + assert mock_patch.call_args.args[0] == "http://blah/core/robot-actions/3" + expected_data = { + "endTimestamp": ANY, + "status": "SUCCESS", + "message": "", + } + assert mock_patch.call_args.kwargs["json"] == expected_data + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.patch") +def test_when_end_load_called_with_failure_then_correct_expected_url_posted_to_with_expected_data( + mock_patch, +): + expeye_interactor = ExpeyeInteraction() + expeye_interactor.end_load(3, "fail", "bad") + + mock_patch.assert_called_once() + assert mock_patch.call_args.args[0] == "http://blah/core/robot-actions/3" + expected_data = { + "endTimestamp": ANY, + "status": "ERROR", + "message": "bad", + } + assert mock_patch.call_args.kwargs["json"] == expected_data + 
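+# As for start_load above, end_load requests should authenticate with the bearer token returned by _get_base_url_and_token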
+ +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.patch") +def test_when_end_load_called_then_use_correct_token( + mock_patch, +): + expeye_interactor = ExpeyeInteraction() + expeye_interactor.end_load(3, "success", "") + + assert isinstance(auth := mock_patch.call_args.kwargs["auth"], BearerAuth) + assert auth.token == "notatoken" + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.patch") +def test_given_server_does_not_respond_when_end_load_called_then_error(mock_patch): + mock_patch.return_value.ok = False + + expeye_interactor = ExpeyeInteraction() + with pytest.raises(ISPyBDepositionNotMade): + expeye_interactor.end_load(1, "", "") + + +@patch("mx_bluesky.hyperion.external_interaction.ispyb.exp_eye_store.patch") +def test_when_update_barcode_called_with_success_then_correct_expected_url_posted_to_with_expected_data( + mock_patch, +): + expeye_interactor = ExpeyeInteraction() + expeye_interactor.update_barcode_and_snapshots( + 3, "test", "/tmp/before.jpg", "/tmp/after.jpg" + ) + + mock_patch.assert_called_once() + assert mock_patch.call_args.args[0] == "http://blah/core/robot-actions/3" + expected_data = { + "sampleBarcode": "test", + "xtalSnapshotBefore": "/tmp/before.jpg", + "xtalSnapshotAfter": "/tmp/after.jpg", + } + assert mock_patch.call_args.kwargs["json"] == expected_data diff --git a/tests/unit_tests/hyperion/external_interaction/ispyb/test_gridscan_ispyb_store_3d.py b/tests/unit_tests/hyperion/external_interaction/ispyb/test_gridscan_ispyb_store_3d.py new file mode 100644 index 000000000..6c5211dfc --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/ispyb/test_gridscan_ispyb_store_3d.py @@ -0,0 +1,766 @@ +from unittest.mock import MagicMock, patch + +import pytest +from ispyb.sp.mxacquisition import MXAcquisition + +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGridInfo, + DataCollectionGroupInfo, + DataCollectionInfo, + DataCollectionPositionInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import Orientation +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) + +from ..conftest import ( + TEST_BARCODE, + TEST_DATA_COLLECTION_GROUP_ID, + TEST_DATA_COLLECTION_IDS, + TEST_GRID_INFO_IDS, + TEST_POSITION_ID, + TEST_SAMPLE_ID, + TEST_SESSION_ID, + assert_upsert_call_with, + mx_acquisition_from_conn, +) + +EXPECTED_START_TIME = "2024-02-08 14:03:59" +EXPECTED_END_TIME = "2024-02-08 14:04:01" + + +@pytest.fixture +def dummy_collection_group_info(): + return DataCollectionGroupInfo( + visit_string="cm31105-4", + experiment_type="Mesh3D", + sample_id=364758, + ) + + +@pytest.fixture +def scan_data_info_for_begin(): + return ScanDataInfo( + data_collection_info=DataCollectionInfo( + omega_start=0.0, + data_collection_number=1, + xtal_snapshot1="test_1_y", + xtal_snapshot2="test_2_y", + xtal_snapshot3="test_3_y", + n_images=800, + axis_range=0, + axis_end=0.0, + kappa_start=None, + parent_id=None, + visit_string="cm31105-4", + sample_id=364758, + detector_id=78, + axis_start=0.0, + focal_spot_size_at_samplex=0.0, + focal_spot_size_at_sampley=0.0, + slitgap_vertical=0.1, + slitgap_horizontal=0.1, + beamsize_at_samplex=0.1, + beamsize_at_sampley=0.1, + transmission=100.0, + comments="Hyperion: Xray centring - Diffraction grid scan of 40 by 20 images in 100.0 um by 100.0 um steps. 
Top left (px): [50,100], bottom right (px): [3250,1700].", + detector_distance=100.0, + exp_time=0.1, + imgdir="/tmp/", + file_template="file_name_0_master.h5", + imgprefix="file_name", + imgsuffix="h5", + n_passes=1, + overlap=0, + start_image_number=1, + wavelength=123.98419840550369, + xbeam=150.0, + ybeam=160.0, + synchrotron_mode=None, + undulator_gap1=1.0, + start_time=EXPECTED_START_TIME, + ), + data_collection_id=None, + data_collection_position_info=None, + data_collection_grid_info=None, + ) + + +@pytest.fixture +def scan_data_infos_for_update(): + scan_xy_data_info_for_update = ScanDataInfo( + data_collection_info=DataCollectionInfo( + omega_start=0.0, + data_collection_number=1, + xtal_snapshot1="test_1_y", + xtal_snapshot2="test_2_y", + xtal_snapshot3="test_3_y", + n_images=800, + axis_range=0, + axis_end=0.0, + kappa_start=None, + parent_id=34, + visit_string="cm31105-4", + sample_id=364758, + detector_id=78, + axis_start=0.0, + focal_spot_size_at_samplex=0.0, + focal_spot_size_at_sampley=0.0, + slitgap_vertical=0.1, + slitgap_horizontal=0.1, + beamsize_at_samplex=0.1, + beamsize_at_sampley=0.1, + transmission=100.0, + comments="Hyperion: Xray centring - Diffraction grid scan of 40 by 20 images in 100.0 um by 100.0 um steps. Top left (px): [50,100], bottom right (px): [3250,1700].", + detector_distance=100.0, + exp_time=0.1, + imgdir="/tmp/", + file_template="file_name_0_master.h5", + imgprefix="file_name", + imgsuffix="h5", + n_passes=1, + overlap=0, + flux=10.0, + start_image_number=1, + wavelength=123.98419840550369, + xbeam=150.0, + ybeam=160.0, + synchrotron_mode="test", + undulator_gap1=1.0, + start_time=EXPECTED_START_TIME, + ), + data_collection_id=TEST_DATA_COLLECTION_IDS[0], + data_collection_position_info=DataCollectionPositionInfo( + pos_x=0, pos_y=0, pos_z=0 + ), + data_collection_grid_info=DataCollectionGridInfo( + dx_in_mm=0.1, + dy_in_mm=0.1, + steps_x=40, + steps_y=20, + microns_per_pixel_x=1.25, + microns_per_pixel_y=1.25, + snapshot_offset_x_pixel=50, + snapshot_offset_y_pixel=100, + orientation=Orientation.HORIZONTAL, + snaked=True, + ), + ) + scan_xz_data_info_for_update = ScanDataInfo( + data_collection_info=DataCollectionInfo( + omega_start=90.0, + data_collection_number=1, + xtal_snapshot1="test_1_z", + xtal_snapshot2="test_2_z", + xtal_snapshot3="test_3_z", + n_images=400, + axis_range=0, + axis_end=90.0, + kappa_start=None, + parent_id=34, + visit_string="cm31105-4", + sample_id=364758, + detector_id=78, + axis_start=90.0, + focal_spot_size_at_samplex=0.0, + focal_spot_size_at_sampley=0.0, + slitgap_vertical=0.1, + slitgap_horizontal=0.1, + beamsize_at_samplex=0.1, + beamsize_at_sampley=0.1, + transmission=100.0, + comments="Hyperion: Xray centring - Diffraction grid scan of 40 by 10 images in 100.0 um by 200.0 um steps. 
Top left (px): [50,120], bottom right (px): [3250,1720].", + detector_distance=100.0, + exp_time=0.1, + imgdir="/tmp/", + file_template="file_name_1_master.h5", + imgprefix="file_name", + imgsuffix="h5", + n_passes=1, + overlap=0, + flux=10.0, + start_image_number=1, + wavelength=123.98419840550369, + xbeam=150.0, + ybeam=160.0, + synchrotron_mode="test", + undulator_gap1=1.0, + start_time=EXPECTED_START_TIME, + ), + data_collection_id=None, + data_collection_position_info=DataCollectionPositionInfo( + pos_x=0.0, pos_y=0.0, pos_z=0.0 + ), + data_collection_grid_info=DataCollectionGridInfo( + dx_in_mm=0.1, + dy_in_mm=0.2, + steps_x=40, + steps_y=10, + microns_per_pixel_x=1.25, + microns_per_pixel_y=1.25, + snapshot_offset_x_pixel=50, + snapshot_offset_y_pixel=120, + orientation=Orientation.HORIZONTAL, + snaked=True, + ), + ) + return [scan_xy_data_info_for_update, scan_xz_data_info_for_update] + + +def setup_mock_return_values(ispyb_conn): + mx_acquisition = ispyb_conn.return_value.__enter__.return_value.mx_acquisition + + mx_acquisition.get_data_collection_group_params = ( + MXAcquisition.get_data_collection_group_params + ) + mx_acquisition.get_data_collection_params = MXAcquisition.get_data_collection_params + mx_acquisition.get_dc_grid_params = MXAcquisition.get_dc_grid_params + mx_acquisition.get_dc_position_params = MXAcquisition.get_dc_position_params + + ispyb_conn.return_value.core.retrieve_visit_id.return_value = TEST_SESSION_ID + mx_acquisition.upsert_data_collection.side_effect = TEST_DATA_COLLECTION_IDS * 2 + mx_acquisition.update_dc_position.return_value = TEST_POSITION_ID + mx_acquisition.upsert_data_collection_group.return_value = ( + TEST_DATA_COLLECTION_GROUP_ID + ) + mx_acquisition.upsert_dc_grid.return_value = TEST_GRID_INFO_IDS[0] + + +def test_ispyb_deposition_comment_for_3D_correct( + mock_ispyb_conn: MagicMock, + dummy_3d_gridscan_ispyb: StoreInIspyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_data_infos_for_update, +): + mock_ispyb_conn = mock_ispyb_conn + mock_mx_aquisition = mx_acquisition_from_conn(mock_ispyb_conn) + mock_upsert_dc = mock_mx_aquisition.upsert_data_collection + ispyb_ids = dummy_3d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) + dummy_3d_gridscan_ispyb.update_deposition(ispyb_ids, scan_data_infos_for_update) + + first_upserted_param_value_list = mock_upsert_dc.call_args_list[1][0][0] + second_upserted_param_value_list = mock_upsert_dc.call_args_list[2][0][0] + assert first_upserted_param_value_list[29] == ( + "Hyperion: Xray centring - Diffraction grid scan of 40 by 20 images " + "in 100.0 um by 100.0 um steps. Top left (px): [50,100], bottom right (px): [3250,1700]." + ) + assert second_upserted_param_value_list[29] == ( + "Hyperion: Xray centring - Diffraction grid scan of 40 by 10 images " + "in 100.0 um by 200.0 um steps. Top left (px): [50,120], bottom right (px): [3250,1720]." 
+ ) + + +def test_store_3d_grid_scan( + mock_ispyb_conn, + dummy_3d_gridscan_ispyb: StoreInIspyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_data_infos_for_update, +): + ispyb_ids = dummy_3d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) + assert ispyb_ids == IspybIds( + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + ) + + assert dummy_3d_gridscan_ispyb.update_deposition( + ispyb_ids, scan_data_infos_for_update + ) == IspybIds( + data_collection_ids=TEST_DATA_COLLECTION_IDS, + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + grid_ids=TEST_GRID_INFO_IDS, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_begin_deposition( + mock_ispyb_conn, + dummy_3d_gridscan_ispyb: StoreInIspyb, + dummy_collection_group_info, + scan_data_info_for_begin, +): + assert dummy_3d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) == IspybIds( + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + ) + + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx_acq.upsert_data_collection_group.mock_calls[0], + mx_acq.get_data_collection_group_params(), + { + "parentid": TEST_SESSION_ID, + "experimenttype": "Mesh3D", + "sampleid": TEST_SAMPLE_ID, + }, + ) + mx_acq.upsert_data_collection.assert_called_once() + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + { + "visitid": TEST_SESSION_ID, + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "sampleid": TEST_SAMPLE_ID, + "detectorid": 78, + "axisstart": 0.0, + "axisrange": 0, + "axisend": 0, + "focal_spot_size_at_samplex": 0.0, + "focal_spot_size_at_sampley": 0.0, + "slitgap_vertical": 0.1, + "slitgap_horizontal": 0.1, + "beamsize_at_samplex": 0.1, + "beamsize_at_sampley": 0.1, + "transmission": 100.0, + "comments": "Hyperion: Xray centring - Diffraction grid scan of 40 by 20 " + "images in 100.0 um by 100.0 um steps. 
Top left (px): [50,100], " + "bottom right (px): [3250,1700].", + "data_collection_number": 1, + "detector_distance": 100.0, + "exp_time": 0.1, + "imgdir": "/tmp/", + "imgprefix": "file_name", + "imgsuffix": "h5", + "n_passes": 1, + "overlap": 0, + "omegastart": 0, + "start_image_number": 1, + "wavelength": 123.98419840550369, + "xbeam": 150.0, + "ybeam": 160.0, + "xtal_snapshot1": "test_1_y", + "xtal_snapshot2": "test_2_y", + "xtal_snapshot3": "test_3_y", + "synchrotron_mode": None, + "undulator_gap1": 1.0, + "starttime": EXPECTED_START_TIME, + "filetemplate": "file_name_0_master.h5", + "nimages": 40 * 20, + }, + ) + mx_acq.update_dc_position.assert_not_called() + mx_acq.upsert_dc_grid.assert_not_called() + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_update_deposition( + mock_ispyb_conn, + dummy_3d_gridscan_ispyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_data_infos_for_update, +): + ispyb_ids = dummy_3d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + mx_acq.upsert_data_collection_group.assert_called_once() + mx_acq.upsert_data_collection.assert_called_once() + mx_acq.upsert_data_collection_group.reset_mock() + + dummy_collection_group_info.sample_barcode = TEST_BARCODE + + actual_rows = dummy_3d_gridscan_ispyb.update_deposition( + ispyb_ids, scan_data_infos_for_update + ) + + assert actual_rows == IspybIds( + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + data_collection_ids=TEST_DATA_COLLECTION_IDS, + grid_ids=TEST_GRID_INFO_IDS, + ) + + mx_acq.upsert_data_collection_group.assert_not_called() + + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[1], + mx_acq.get_data_collection_params(), + { + "id": 12, + "visitid": TEST_SESSION_ID, + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "sampleid": TEST_SAMPLE_ID, + "detectorid": 78, + "axisstart": 0.0, + "axisrange": 0, + "axisend": 0, + "focal_spot_size_at_samplex": 0.0, + "focal_spot_size_at_sampley": 0.0, + "slitgap_vertical": 0.1, + "slitgap_horizontal": 0.1, + "beamsize_at_samplex": 0.1, + "beamsize_at_sampley": 0.1, + "transmission": 100.0, + "comments": "Hyperion: Xray centring - Diffraction grid scan of 40 by 20 " + "images in 100.0 um by 100.0 um steps. 
Top left (px): [50,100], " + "bottom right (px): [3250,1700].", + "data_collection_number": 1, + "detector_distance": 100.0, + "exp_time": 0.1, + "imgdir": "/tmp/", + "imgprefix": "file_name", + "imgsuffix": "h5", + "n_passes": 1, + "overlap": 0, + "flux": 10.0, + "omegastart": 0.0, + "start_image_number": 1, + "wavelength": 123.98419840550369, + "xbeam": 150.0, + "ybeam": 160.0, + "xtal_snapshot1": "test_1_y", + "xtal_snapshot2": "test_2_y", + "xtal_snapshot3": "test_3_y", + "synchrotron_mode": "test", + "undulator_gap1": 1.0, + "starttime": EXPECTED_START_TIME, + "filetemplate": "file_name_0_master.h5", + "nimages": 40 * 20, + }, + ) + + assert_upsert_call_with( + mx_acq.update_dc_position.mock_calls[0], + mx_acq.get_dc_position_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "pos_x": 0, + "pos_y": 0, + "pos_z": 0, + }, + ) + + assert_upsert_call_with( + mx_acq.upsert_dc_grid.mock_calls[0], + mx_acq.get_dc_grid_params(), + { + "parentid": TEST_DATA_COLLECTION_IDS[0], + "dxinmm": 0.1, + "dyinmm": 0.1, + "stepsx": 40, + "stepsy": 20, + "micronsperpixelx": 1.25, + "micronsperpixely": 1.25, + "snapshotoffsetxpixel": 50, + "snapshotoffsetypixel": 100, + "orientation": "horizontal", + "snaked": True, + }, + ) + + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[2], + mx_acq.get_data_collection_params(), + { + "id": None, + "visitid": TEST_SESSION_ID, + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "sampleid": TEST_SAMPLE_ID, + "detectorid": 78, + "axisstart": 90.0, + "axisrange": 0, + "axisend": 90.0, + "focal_spot_size_at_samplex": 0.0, + "focal_spot_size_at_sampley": 0.0, + "slitgap_vertical": 0.1, + "slitgap_horizontal": 0.1, + "beamsize_at_samplex": 0.1, + "beamsize_at_sampley": 0.1, + "transmission": 100.0, + "comments": "Hyperion: Xray centring - Diffraction grid scan of 40 by 10 " + "images in 100.0 um by 200.0 um steps. 
Top left (px): [50,120], " + "bottom right (px): [3250,1720].", + "data_collection_number": 1, + "detector_distance": 100.0, + "exp_time": 0.1, + "imgdir": "/tmp/", + "imgprefix": "file_name", + "imgsuffix": "h5", + "n_passes": 1, + "overlap": 0, + "flux": 10.0, + "omegastart": 90.0, + "start_image_number": 1, + "wavelength": 123.98419840550369, + "xbeam": 150.0, + "ybeam": 160.0, + "xtal_snapshot1": "test_1_z", + "xtal_snapshot2": "test_2_z", + "xtal_snapshot3": "test_3_z", + "synchrotron_mode": "test", + "undulator_gap1": 1.0, + "starttime": EXPECTED_START_TIME, + "filetemplate": "file_name_1_master.h5", + "nimages": 40 * 10, + }, + ) + + assert_upsert_call_with( + mx_acq.update_dc_position.mock_calls[1], + mx_acq.get_dc_position_params(), + { + "id": TEST_DATA_COLLECTION_IDS[1], + "pos_x": 0, + "pos_y": 0, + "pos_z": 0, + }, + ) + + assert_upsert_call_with( + mx_acq.upsert_dc_grid.mock_calls[1], + mx_acq.get_dc_grid_params(), + { + "parentid": TEST_DATA_COLLECTION_IDS[1], + "dxinmm": 0.1, + "dyinmm": 0.2, + "stepsx": 40, + "stepsy": 10, + "micronsperpixelx": 1.25, + "micronsperpixely": 1.25, + "snapshotoffsetxpixel": 50, + "snapshotoffsetypixel": 120, + "orientation": "horizontal", + "snaked": True, + }, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +@patch( + "mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store.get_current_time_string", +) +def test_end_deposition_happy_path( + get_current_time, + mock_ispyb_conn, + dummy_3d_gridscan_ispyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_data_infos_for_update, +): + ispyb_ids = dummy_3d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + assert len(mx_acq.upsert_data_collection_group.mock_calls) == 1 + ispyb_ids = dummy_3d_gridscan_ispyb.update_deposition( + ispyb_ids, scan_data_infos_for_update + ) + assert len(mx_acq.upsert_data_collection_group.mock_calls) == 1 + assert len(mx_acq.upsert_data_collection.mock_calls) == 3 + assert len(mx_acq.upsert_dc_grid.mock_calls) == 2 + + get_current_time.return_value = EXPECTED_END_TIME + dummy_3d_gridscan_ispyb.end_deposition(ispyb_ids, "success", "Test succeeded") + assert mx_acq.update_data_collection_append_comments.call_args_list[0] == ( + ( + TEST_DATA_COLLECTION_IDS[0], + "DataCollection Successful reason: Test succeeded", + " ", + ), + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[3], + mx_acq.get_data_collection_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "endtime": EXPECTED_END_TIME, + "runstatus": "DataCollection Successful", + }, + ) + assert mx_acq.update_data_collection_append_comments.call_args_list[1] == ( + ( + TEST_DATA_COLLECTION_IDS[1], + "DataCollection Successful reason: Test succeeded", + " ", + ), + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[4], + mx_acq.get_data_collection_params(), + { + "id": TEST_DATA_COLLECTION_IDS[1], + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "endtime": EXPECTED_END_TIME, + "runstatus": "DataCollection Successful", + }, + ) + + +def test_param_keys( + mock_ispyb_conn, + dummy_2d_gridscan_ispyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_xy_data_info_for_update, +): + ispyb_ids = dummy_2d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, 
[scan_data_info_for_begin] + ) + assert dummy_2d_gridscan_ispyb.update_deposition( + ispyb_ids, [scan_xy_data_info_for_update] + ) == IspybIds( + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + grid_ids=(TEST_GRID_INFO_IDS[0],), + ) + + +def _test_when_grid_scan_stored_then_data_present_in_upserts( + ispyb_conn, + dummy_ispyb, + test_function, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_data_info_for_update, + test_group=False, +): + setup_mock_return_values(ispyb_conn) + ispyb_ids = dummy_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) + dummy_ispyb.update_deposition(ispyb_ids, [scan_data_info_for_update]) + + mx_acquisition = ispyb_conn.return_value.__enter__.return_value.mx_acquisition + + upsert_data_collection_arg_list = ( + mx_acquisition.upsert_data_collection.call_args_list[1][0] + ) + actual = upsert_data_collection_arg_list[0] + assert test_function(MXAcquisition.get_data_collection_params(), actual) + + if test_group: + upsert_data_collection_group_arg_list = ( + mx_acquisition.upsert_data_collection_group.call_args_list[0][0] + ) + actual = upsert_data_collection_group_arg_list[0] + assert test_function(MXAcquisition.get_data_collection_group_params(), actual) + + +@patch("ispyb.open", autospec=True) +def test_given_sampleid_of_none_when_grid_scan_stored_then_sample_id_not_set( + ispyb_conn, + dummy_2d_gridscan_ispyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_xy_data_info_for_update, +): + dummy_collection_group_info.sample_id = None + scan_data_info_for_begin.data_collection_info.sample_id = None + scan_xy_data_info_for_update.data_collection_info.sample_id = None + + def test_sample_id(default_params, actual): + sampleid_idx = list(default_params).index("sampleid") + return actual[sampleid_idx] == default_params["sampleid"] + + _test_when_grid_scan_stored_then_data_present_in_upserts( + ispyb_conn, + dummy_2d_gridscan_ispyb, + test_sample_id, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_xy_data_info_for_update, + True, + ) + + +@patch("ispyb.open", autospec=True) +def test_given_real_sampleid_when_grid_scan_stored_then_sample_id_set( + ispyb_conn, + dummy_2d_gridscan_ispyb: StoreInIspyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_xy_data_info_for_update, +): + expected_sample_id = 364758 + + def test_sample_id(default_params, actual): + sampleid_idx = list(default_params).index("sampleid") + return actual[sampleid_idx] == expected_sample_id + + _test_when_grid_scan_stored_then_data_present_in_upserts( + ispyb_conn, + dummy_2d_gridscan_ispyb, + test_sample_id, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_xy_data_info_for_update, + True, + ) + + +def test_fail_result_run_results_in_bad_run_status( + mock_ispyb_conn: MagicMock, + dummy_2d_gridscan_ispyb: StoreInIspyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_xy_data_info_for_update, +): + mock_ispyb_conn = mock_ispyb_conn + mock_mx_aquisition = ( + mock_ispyb_conn.return_value.__enter__.return_value.mx_acquisition + ) + mock_upsert_data_collection = mock_mx_aquisition.upsert_data_collection + + ispyb_ids = dummy_2d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) + ispyb_ids = dummy_2d_gridscan_ispyb.update_deposition( + ispyb_ids, [scan_xy_data_info_for_update] + ) + dummy_2d_gridscan_ispyb.end_deposition(ispyb_ids, "fail", "test specifies failure") 
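+ # The third data collection upsert (made by end_deposition) should record an unsuccessful run status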
+ + mock_upsert_data_collection_calls = mock_upsert_data_collection.call_args_list + end_deposition_upsert_args = mock_upsert_data_collection_calls[2][0] + upserted_param_value_list = end_deposition_upsert_args[0] + assert "DataCollection Unsuccessful" in upserted_param_value_list + assert "DataCollection Successful" not in upserted_param_value_list + + +def test_no_exception_during_run_results_in_good_run_status( + mock_ispyb_conn: MagicMock, + dummy_2d_gridscan_ispyb: StoreInIspyb, + dummy_collection_group_info, + scan_data_info_for_begin, + scan_xy_data_info_for_update, +): + mock_ispyb_conn = mock_ispyb_conn + setup_mock_return_values(mock_ispyb_conn) + mock_mx_aquisition = ( + mock_ispyb_conn.return_value.__enter__.return_value.mx_acquisition + ) + mock_upsert_data_collection = mock_mx_aquisition.upsert_data_collection + + ispyb_ids = dummy_2d_gridscan_ispyb.begin_deposition( + dummy_collection_group_info, [scan_data_info_for_begin] + ) + ispyb_ids = dummy_2d_gridscan_ispyb.update_deposition( + ispyb_ids, [scan_xy_data_info_for_update] + ) + dummy_2d_gridscan_ispyb.end_deposition(ispyb_ids, "success", "") + + mock_upsert_data_collection_calls = mock_upsert_data_collection.call_args_list + end_deposition_upsert_args = mock_upsert_data_collection_calls[2][0] + upserted_param_value_list = end_deposition_upsert_args[0] + assert "DataCollection Unsuccessful" not in upserted_param_value_list + assert "DataCollection Successful" in upserted_param_value_list diff --git a/tests/unit_tests/hyperion/external_interaction/ispyb/test_rotation_ispyb_store.py b/tests/unit_tests/hyperion/external_interaction/ispyb/test_rotation_ispyb_store.py new file mode 100644 index 000000000..130aedbe8 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/ispyb/test_rotation_ispyb_store.py @@ -0,0 +1,507 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from mx_bluesky.hyperion.external_interaction.ispyb.data_model import ( + DataCollectionGroupInfo, + DataCollectionInfo, + DataCollectionPositionInfo, + ScanDataInfo, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store import ( + IspybIds, + StoreInIspyb, +) +from mx_bluesky.hyperion.parameters.constants import CONST + +from ..conftest import ( + EXPECTED_END_TIME, + EXPECTED_START_TIME, + TEST_BARCODE, + TEST_DATA_COLLECTION_GROUP_ID, + TEST_DATA_COLLECTION_IDS, + TEST_SAMPLE_ID, + TEST_SESSION_ID, + assert_upsert_call_with, + mx_acquisition_from_conn, +) + +EXPECTED_DATA_COLLECTION = { + "visitid": TEST_SESSION_ID, + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "sampleid": None, + "detectorid": 78, + "axisstart": 0.0, + "axisrange": 0.1, + "axisend": 180, + "focal_spot_size_at_samplex": 1.0, + "focal_spot_size_at_sampley": 1.0, + "beamsize_at_samplex": 1, + "beamsize_at_sampley": 1, + "transmission": 100.0, + "comments": "Hyperion rotation scan", + "data_collection_number": 0, + "detector_distance": 100.0, + "exp_time": 0.1, + "imgdir": "/tmp/", + "imgprefix": "file_name", + "imgsuffix": "h5", + "n_passes": 1, + "overlap": 0, + "omegastart": 0, + "start_image_number": 1, + "wavelength": 123.98419840550369, + "xbeam": 150.0, + "ybeam": 160.0, + "xtal_snapshot1": "test_1_y", + "xtal_snapshot2": "test_2_y", + "xtal_snapshot3": "test_3_y", + "synchrotron_mode": None, + "starttime": EXPECTED_START_TIME, + "filetemplate": "file_name_1_master.h5", + "nimages": 1800, + "kappastart": 0, +} + + +@pytest.fixture +def dummy_rotation_data_collection_group_info(): + return DataCollectionGroupInfo( + visit_string="cm31105-4", + 
experiment_type="SAD", + sample_id=364758, + ) + + +@pytest.fixture +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def scan_data_info_for_begin(): + return ScanDataInfo( + data_collection_info=DataCollectionInfo( + omega_start=0.0, + data_collection_number=0, + xtal_snapshot1="test_1_y", + xtal_snapshot2="test_2_y", + xtal_snapshot3="test_3_y", + n_images=1800, + axis_range=0.1, + axis_end=180.0, + kappa_start=0.0, + parent_id=None, + visit_string="cm31105-4", + sample_id=364758, + detector_id=78, + axis_start=0.0, + focal_spot_size_at_samplex=1.0, + focal_spot_size_at_sampley=1.0, + beamsize_at_samplex=1.0, + beamsize_at_sampley=1.0, + transmission=100.0, + comments="Hyperion rotation scan", + detector_distance=100.0, + exp_time=0.1, + imgdir="/tmp/", + file_template="file_name_1_master.h5", + imgprefix="file_name", + imgsuffix="h5", + n_passes=1, + overlap=0, + start_image_number=1, + wavelength=123.98419840550369, + xbeam=150.0, + ybeam=160.0, + synchrotron_mode=None, + undulator_gap1=None, + start_time="2024-02-08 14:03:59", + ), + data_collection_id=None, + data_collection_position_info=None, + data_collection_grid_info=None, + ) + + +@pytest.fixture +def scan_data_info_for_update(scan_data_info_for_begin): + return ScanDataInfo( + data_collection_info=DataCollectionInfo( + omega_start=0.0, + data_collection_number=0, + xtal_snapshot1="test_1_y", + xtal_snapshot2="test_2_y", + xtal_snapshot3="test_3_y", + n_images=1800, + axis_range=0.1, + axis_end=180.0, + kappa_start=0.0, + parent_id=None, + visit_string="cm31105-4", + detector_id=78, + axis_start=0.0, + focal_spot_size_at_samplex=1.0, + focal_spot_size_at_sampley=1.0, + slitgap_vertical=1.0, + slitgap_horizontal=1.0, + beamsize_at_samplex=1.0, + beamsize_at_sampley=1.0, + transmission=100.0, + comments="Hyperion rotation scan", + detector_distance=100.0, + exp_time=0.1, + imgdir="/tmp/", + file_template="file_name_1_master.h5", + imgprefix="file_name", + imgsuffix="h5", + n_passes=1, + overlap=0, + flux=10.0, + start_image_number=1, + wavelength=123.98419840550369, + xbeam=150.0, + ybeam=160.0, + synchrotron_mode="test", + undulator_gap1=None, + start_time="2024-02-08 14:03:59", + ), + data_collection_id=11, + data_collection_position_info=DataCollectionPositionInfo( + pos_x=10.0, pos_y=20.0, pos_z=30.0 + ), + data_collection_grid_info=None, + ) + + +@pytest.fixture +def dummy_rotation_ispyb_with_experiment_type(): + store_in_ispyb = StoreInIspyb(CONST.SIM.ISPYB_CONFIG) + return store_in_ispyb + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_begin_deposition( + mock_ispyb_conn, + dummy_rotation_ispyb, + dummy_rotation_data_collection_group_info, + scan_data_info_for_begin, +): + assert scan_data_info_for_begin.data_collection_info.parent_id is None + + assert dummy_rotation_ispyb.begin_deposition( + dummy_rotation_data_collection_group_info, [scan_data_info_for_begin] + ) == IspybIds( + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + ) + + assert ( + scan_data_info_for_begin.data_collection_info.parent_id + == TEST_DATA_COLLECTION_GROUP_ID + ) + + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx_acq.upsert_data_collection_group.mock_calls[0], + mx_acq.get_data_collection_group_params(), + { + 
"parentid": TEST_SESSION_ID, + "experimenttype": "SAD", + "sampleid": TEST_SAMPLE_ID, + }, + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + EXPECTED_DATA_COLLECTION | {"sampleid": TEST_SAMPLE_ID}, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_begin_deposition_with_group_id_updates_but_doesnt_insert( + mock_ispyb_conn, + dummy_rotation_data_collection_group_info, + scan_data_info_for_begin, +): + dummy_rotation_ispyb = StoreInIspyb(CONST.SIM.ISPYB_CONFIG) + scan_data_info_for_begin.data_collection_info.parent_id = ( + TEST_DATA_COLLECTION_GROUP_ID + ) + + assert dummy_rotation_ispyb.begin_deposition( + dummy_rotation_data_collection_group_info, [scan_data_info_for_begin] + ) == IspybIds( + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + ) + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx_acq.upsert_data_collection_group.mock_calls[0], + mx_acq.get_data_collection_group_params(), + { + "id": TEST_DATA_COLLECTION_GROUP_ID, + "parentid": TEST_SESSION_ID, + "experimenttype": "SAD", + "sampleid": TEST_SAMPLE_ID, + }, + ) + assert ( + scan_data_info_for_begin.data_collection_info.parent_id + == TEST_DATA_COLLECTION_GROUP_ID + ) + + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + EXPECTED_DATA_COLLECTION | {"sampleid": TEST_SAMPLE_ID}, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_begin_deposition_with_alternate_experiment_type( + mock_ispyb_conn, + dummy_rotation_ispyb_with_experiment_type, + dummy_rotation_data_collection_group_info, + scan_data_info_for_begin, +): + dummy_rotation_data_collection_group_info.experiment_type = "Characterization" + assert dummy_rotation_ispyb_with_experiment_type.begin_deposition( + dummy_rotation_data_collection_group_info, + [scan_data_info_for_begin], + ) == IspybIds( + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + ) + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx_acq.upsert_data_collection_group.mock_calls[0], + mx_acq.get_data_collection_group_params(), + { + "parentid": TEST_SESSION_ID, + "experimenttype": "Characterization", + "sampleid": TEST_SAMPLE_ID, + }, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_update_deposition( + mock_ispyb_conn, + dummy_rotation_ispyb, + dummy_rotation_data_collection_group_info, + scan_data_info_for_begin, + scan_data_info_for_update, +): + ispyb_ids = dummy_rotation_ispyb.begin_deposition( + dummy_rotation_data_collection_group_info, [scan_data_info_for_begin] + ) + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + mx_acq.upsert_data_collection_group.reset_mock() + mx_acq.upsert_data_collection.reset_mock() + + scan_data_info_for_update.data_collection_info.parent_id = ( + ispyb_ids.data_collection_group_id + ) + scan_data_info_for_update.data_collection_id = ispyb_ids.data_collection_ids[0] + dummy_rotation_data_collection_group_info.sample_barcode = TEST_BARCODE + + assert 
dummy_rotation_ispyb.update_deposition( + ispyb_ids, + [scan_data_info_for_update], + ) == IspybIds( + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + ) + mx_acq.upsert_data_collection_group.assert_not_called() + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + EXPECTED_DATA_COLLECTION + | { + "id": TEST_DATA_COLLECTION_IDS[0], + "synchrotron_mode": "test", + "slitgap_vertical": 1, + "slitgap_horizontal": 1, + "flux": 10, + }, + ) + + assert_upsert_call_with( + mx_acq.update_dc_position.mock_calls[0], + mx_acq.get_dc_position_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "pos_x": 10, + "pos_y": 20, + "pos_z": 30, + }, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +def test_update_deposition_with_group_id_updates( + mock_ispyb_conn, + dummy_rotation_data_collection_group_info, + scan_data_info_for_begin, + scan_data_info_for_update, +): + dummy_rotation_ispyb = StoreInIspyb(CONST.SIM.ISPYB_CONFIG) + scan_data_info_for_begin.data_collection_info.parent_id = ( + TEST_DATA_COLLECTION_GROUP_ID + ) + ispyb_ids = dummy_rotation_ispyb.begin_deposition( + dummy_rotation_data_collection_group_info, [scan_data_info_for_begin] + ) + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + mx_acq.upsert_data_collection_group.reset_mock() + mx_acq.upsert_data_collection.reset_mock() + + scan_data_info_for_update.data_collection_info.parent_id = ( + ispyb_ids.data_collection_group_id + ) + scan_data_info_for_update.data_collection_id = ispyb_ids.data_collection_ids[0] + dummy_rotation_data_collection_group_info.sample_barcode = TEST_BARCODE + assert dummy_rotation_ispyb.update_deposition( + ispyb_ids, + [scan_data_info_for_update], + ) == IspybIds( + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + data_collection_ids=(TEST_DATA_COLLECTION_IDS[0],), + ) + mx_acq.upsert_data_collection_group.assert_not_called() + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + EXPECTED_DATA_COLLECTION + | { + "id": TEST_DATA_COLLECTION_IDS[0], + "synchrotron_mode": "test", + "slitgap_vertical": 1, + "slitgap_horizontal": 1, + "flux": 10, + }, + ) + + assert_upsert_call_with( + mx_acq.update_dc_position.mock_calls[0], + mx_acq.get_dc_position_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "pos_x": 10, + "pos_y": 20, + "pos_z": 30, + }, + ) + + +@patch( + "mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping.get_current_time_string", + new=MagicMock(return_value=EXPECTED_START_TIME), +) +@patch( + "mx_bluesky.hyperion.external_interaction.ispyb.ispyb_store.get_current_time_string", +) +def test_end_deposition_happy_path( + get_current_time, + mock_ispyb_conn, + dummy_rotation_ispyb, + dummy_rotation_data_collection_group_info, + scan_data_info_for_begin, + scan_data_info_for_update, +): + ispyb_ids = dummy_rotation_ispyb.begin_deposition( + dummy_rotation_data_collection_group_info, [scan_data_info_for_begin] + ) + scan_data_info_for_update.data_collection_info.parent_id = ( + ispyb_ids.data_collection_group_id + ) + scan_data_info_for_update.data_collection_id = ispyb_ids.data_collection_ids[0] + ispyb_ids = dummy_rotation_ispyb.update_deposition( + ispyb_ids, + [scan_data_info_for_update], + ) + mx_acq = mx_acquisition_from_conn(mock_ispyb_conn) + 
mx_acq.upsert_data_collection_group.reset_mock() + mx_acq.upsert_data_collection.reset_mock() + mx_acq.upsert_dc_grid.reset_mock() + + get_current_time.return_value = EXPECTED_END_TIME + dummy_rotation_ispyb.end_deposition(ispyb_ids, "success", "Test succeeded") + assert mx_acq.update_data_collection_append_comments.call_args_list[0] == ( + ( + TEST_DATA_COLLECTION_IDS[0], + "DataCollection Successful reason: Test succeeded", + " ", + ), + ) + assert_upsert_call_with( + mx_acq.upsert_data_collection.mock_calls[0], + mx_acq.get_data_collection_params(), + { + "id": TEST_DATA_COLLECTION_IDS[0], + "parentid": TEST_DATA_COLLECTION_GROUP_ID, + "endtime": EXPECTED_END_TIME, + "runstatus": "DataCollection Successful", + }, + ) + assert len(mx_acq.upsert_data_collection.mock_calls) == 1 + + +def test_store_rotation_scan_failures( + mock_ispyb_conn, dummy_rotation_ispyb: StoreInIspyb +): + ispyb_ids = IspybIds( + data_collection_group_id=TEST_DATA_COLLECTION_GROUP_ID, + ) + with pytest.raises(AssertionError): + dummy_rotation_ispyb.end_deposition(ispyb_ids, "", "") + + +@pytest.mark.parametrize("dcgid", [2, 45, 61, 88, 13, 25]) +def test_store_rotation_scan_uses_supplied_dcgid( + mock_ispyb_conn, + dcgid, + dummy_rotation_data_collection_group_info, + scan_data_info_for_begin, + scan_data_info_for_update, +): + mock_ispyb_conn.return_value.mx_acquisition.upsert_data_collection_group.return_value = dcgid + store_in_ispyb = StoreInIspyb(CONST.SIM.ISPYB_CONFIG) + scan_data_info_for_begin.data_collection_info.parent_id = dcgid + ispyb_ids = store_in_ispyb.begin_deposition( + dummy_rotation_data_collection_group_info, [scan_data_info_for_begin] + ) + assert ispyb_ids.data_collection_group_id == dcgid + mx = mx_acquisition_from_conn(mock_ispyb_conn) + assert_upsert_call_with( + mx.upsert_data_collection_group.mock_calls[0], + mx.get_data_collection_group_params(), + { + "id": dcgid, + "parentid": TEST_SESSION_ID, + "experimenttype": "SAD", + "sampleid": TEST_SAMPLE_ID, + }, + ) + assert ( + store_in_ispyb.update_deposition( + ispyb_ids, + [scan_data_info_for_update], + ).data_collection_group_id + == dcgid + ) diff --git a/tests/unit_tests/hyperion/external_interaction/nexus/__init__.py b/tests/unit_tests/hyperion/external_interaction/nexus/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/external_interaction/nexus/test_compare_nexus_to_gda_exhaustively.py b/tests/unit_tests/hyperion/external_interaction/nexus/test_compare_nexus_to_gda_exhaustively.py new file mode 100644 index 000000000..9c43c73d8 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/nexus/test_compare_nexus_to_gda_exhaustively.py @@ -0,0 +1,271 @@ +import shutil +from pathlib import Path +from unittest.mock import MagicMock + +import h5py +import pytest +from h5py import Dataset, Datatype, File, Group +from numpy import dtype + +from mx_bluesky.hyperion.utils.validation import _generate_fake_nexus + +from .....conftest import extract_metafile + +TEST_DATA_DIRECTORY = Path("tests/test_data/nexus_files/rotation") +TEST_EXAMPLE_NEXUS_FILE = Path("ins_8_5.nxs") +TEST_NEXUS_FILENAME = "rotation_scan_test_nexus" +TEST_METAFILE = "ins_8_5_meta.h5.gz" +FAKE_DATAFILE = "../fake_data.h5" + +h5item = Group | Dataset | File | Datatype + + +def get_groups(dataset: h5py.File) -> set: + e = set() + + def add_layer(s: set, d: h5item): + if isinstance(d, h5py.Group): + for k in d: + s.add(d.name) + add_layer(s, d[k]) + + add_layer(e, dataset) + return e + + +def has_equiv_in(item: str, 
groups: set, exception_table: dict[str, set[str]]): + if item not in exception_table.keys(): + return False + # one of the items in exception_table[item] must be in the tested groups + return exception_table[item] & groups != set() + + +def test_has_equiv_in(): + test_table = {"a": {"b", "c"}} + assert not has_equiv_in("a", {"x", "y", "z"}, test_table) + assert has_equiv_in("a", {"x", "y", "c"}, test_table) + + +FilesAndgroups = tuple[h5py.File, set[str], h5py.File, set[str]] + + +@pytest.fixture +def files_and_groups(tmpdir): + filename, run_number = _generate_fake_nexus(TEST_NEXUS_FILENAME, tmpdir) + extract_metafile( + str(TEST_DATA_DIRECTORY / TEST_METAFILE), + f"{tmpdir}/{filename}_{run_number}_meta.h5", + ) + extract_metafile( + str(TEST_DATA_DIRECTORY / TEST_METAFILE), + f"{tmpdir}/ins_8_5_meta.h5", + ) + new_hyperion_master = tmpdir / f"{filename}_{run_number}.nxs" + new_gda_master = tmpdir / TEST_EXAMPLE_NEXUS_FILE + new_gda_data = [tmpdir / f"ins_8_5_00000{n}.h5" for n in [1, 2, 3, 4]] + new_hyp_data = [ + tmpdir / f"{filename}_{run_number}_00000{n}.h5" for n in [1, 2, 3, 4] + ] + shutil.copy(TEST_DATA_DIRECTORY / TEST_EXAMPLE_NEXUS_FILE, new_gda_master) + [shutil.copy(TEST_DATA_DIRECTORY / FAKE_DATAFILE, d) for d in new_gda_data] + [shutil.copy(TEST_DATA_DIRECTORY / FAKE_DATAFILE, d) for d in new_hyp_data] + + with ( + h5py.File(new_gda_master, "r") as example_nexus, + h5py.File(new_hyperion_master, "r") as hyperion_nexus, + ): + yield ( + example_nexus, + get_groups(example_nexus), + hyperion_nexus, + get_groups(hyperion_nexus), + ) + + +GROUPS_EQUIVALENTS_TABLE = { + "/entry/instrument/source": {"/entry/source"}, + "/entry/instrument/detector_z": {"/entry/instrument/detector/detector_z"}, + "/entry/instrument/transformations": {"/entry/instrument/detector/transformations"}, +} +GROUPS_EXCEPTIONS = {"/entry/instrument/attenuator"} + + +def test_hyperion_rotation_nexus_groups_against_gda( + files_and_groups: FilesAndgroups, +): + _, gda_groups, _, hyperion_groups = files_and_groups + for item in gda_groups: + assert ( + item in hyperion_groups + or has_equiv_in(item, hyperion_groups, GROUPS_EQUIVALENTS_TABLE) + or item in GROUPS_EXCEPTIONS + ) + + +DATATYPE_EXCEPTION_TABLE = { + "/entry/instrument/detector/bit_depth_image": ( + dtype("int64"), + "gda item bit_depth_image not present", + ), + "/entry/instrument/detector/depends_on": (dtype("S48"), dtype("S1024")), + "/entry/instrument/detector/description": (dtype("S9"), dtype("S1024")), + "/entry/instrument/detector/detector_readout_time": ( + dtype("int64"), + "gda item detector_readout_time not present", + ), + "/entry/instrument/detector/distance": ( + dtype("= 2 + assert ( + single_dummy_file.detector.detector_params.image_size[0] == PIXELS_Y_EIGER2_X_4M + ) + assert ( + single_dummy_file.detector.detector_params.image_size[1] == PIXELS_X_EIGER2_X_4M + ) + + +@patch.dict(os.environ, {"BEAMLINE": "I03"}) +def test_nexus_writer_writes_beamline_name_correctly( + test_fgs_params: ThreeDGridScan, +): + d_size = test_fgs_params.detector_params.detector_size_constants.det_size_pixels + data_shape = (test_fgs_params.num_images, d_size.width, d_size.height) + nexus_writer = NexusWriter(test_fgs_params, data_shape, test_fgs_params.scan_points) + assert nexus_writer.source.beamline == "I03" + + +def check_validity_through_zocalo(nexus_writers: tuple[NexusWriter, NexusWriter]): + import dlstbx.swmr.h5check + + nexus_writer_1, nexus_writer_2 = nexus_writers + + nexus_writer_1.create_nexus_file(np.uint16) + 
nexus_writer_2.create_nexus_file(np.uint16) + + for filename in [nexus_writer_1.nexus_file, nexus_writer_1.master_file]: + with h5py.File(filename, "r") as written_nexus_file: + dlstbx.swmr.h5check.get_real_frames( + written_nexus_file, written_nexus_file["entry/data/data"] + ) + + for filename in [nexus_writer_2.nexus_file, nexus_writer_2.master_file]: + with h5py.File(filename, "r") as written_nexus_file: + dlstbx.swmr.h5check.get_real_frames( + written_nexus_file, written_nexus_file["entry/data/data"] + ) + + +@pytest.mark.dlstbx +def test_nexus_file_validity_for_zocalo_with_two_linked_datasets( + dummy_nexus_writers: tuple[NexusWriter, NexusWriter], +): + check_validity_through_zocalo(dummy_nexus_writers) + + +@pytest.mark.dlstbx +def test_nexus_file_validity_for_zocalo_with_three_linked_datasets( + dummy_nexus_writers_with_more_images: tuple[NexusWriter, NexusWriter], +): + check_validity_through_zocalo(dummy_nexus_writers_with_more_images) + + +@pytest.mark.skip("Requires #87 of nexgen") +def test_given_some_datafiles_outside_of_VDS_range_THEN_they_are_not_in_nexus_file( + dummy_nexus_writers_with_more_images: tuple[NexusWriter, NexusWriter], +): + nexus_writer_1, nexus_writer_2 = dummy_nexus_writers_with_more_images + + nexus_writer_1.create_nexus_file(np.uint16) + nexus_writer_2.create_nexus_file(np.uint16) + + for filename in [nexus_writer_1.nexus_file, nexus_writer_1.master_file]: + with h5py.File(filename, "r") as written_nexus_file: + assert isinstance(data := written_nexus_file["entry/data"], h5py.Dataset) + assert "data_000001" in data + assert "data_000002" in data + assert "data_000003" not in data + + for filename in [nexus_writer_2.nexus_file, nexus_writer_2.master_file]: + with h5py.File(filename, "r") as written_nexus_file: + assert isinstance(data := written_nexus_file["entry/data"], h5py.Dataset) + assert "data_000001" not in data + assert "data_000002" in data + assert "data_000003" in data + + +def test_given_data_files_not_yet_written_when_nexus_files_created_then_nexus_files_still_written( + test_fgs_params: ThreeDGridScan, +): + test_fgs_params.file_name = "non_existant_file" + with create_nexus_writers(test_fgs_params) as ( + nexus_writer_1, + nexus_writer_2, + ): + nexus_writer_1.create_nexus_file(np.uint16) + nexus_writer_2.create_nexus_file(np.uint16) + + for filename in [ + nexus_writer_1.nexus_file, + nexus_writer_1.master_file, + nexus_writer_1.nexus_file, + nexus_writer_1.master_file, + ]: + assert os.path.exists(filename) diff --git a/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000001.h5 b/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000001.h5 new file mode 100644 index 000000000..488925ffb Binary files /dev/null and b/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000001.h5 differ diff --git a/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000002.h5 b/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000002.h5 new file mode 100644 index 000000000..5662609e0 Binary files /dev/null and b/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000002.h5 differ diff --git a/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000003.h5 b/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000003.h5 new file mode 100644 index 000000000..5662609e0 Binary files /dev/null and b/tests/unit_tests/hyperion/external_interaction/test_data/dummy_1_000003.h5 differ diff --git 
a/tests/unit_tests/hyperion/external_interaction/test_ispyb_dataclass.py b/tests/unit_tests/hyperion/external_interaction/test_ispyb_dataclass.py new file mode 100644 index 000000000..08d2af8f9 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/test_ispyb_dataclass.py @@ -0,0 +1,28 @@ +from copy import deepcopy + +import numpy as np + +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_dataclass import ( + GRIDSCAN_ISPYB_PARAM_DEFAULTS, + IspybParams, +) + + +def test_given_position_as_list_when_ispyb_params_created_then_converted_to_numpy_array(): + params = deepcopy(GRIDSCAN_ISPYB_PARAM_DEFAULTS) + params["position"] = [1, 2, 3] + + ispyb_params = IspybParams(**params) + + assert isinstance(ispyb_params.position, np.ndarray) + assert np.array_equal(ispyb_params.position, [1, 2, 3]) + + +def test_given_ispyb_params_when_converted_to_dict_then_position_is_a_list(): + params = deepcopy(GRIDSCAN_ISPYB_PARAM_DEFAULTS) + params["position"] = [1, 2, 3] + + ispyb_params_dict = IspybParams(**params).dict() + + assert isinstance(ispyb_params_dict["position"], list) + assert ispyb_params_dict["position"] == [1, 2, 3] diff --git a/tests/unit_tests/hyperion/external_interaction/test_ispyb_utils.py b/tests/unit_tests/hyperion/external_interaction/test_ispyb_utils.py new file mode 100644 index 000000000..351467e61 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/test_ispyb_utils.py @@ -0,0 +1,69 @@ +import re + +import pytest + +from mx_bluesky.hyperion.external_interaction.callbacks.common.ispyb_mapping import ( + get_proposal_and_session_from_visit_string, + get_visit_string_from_path, +) +from mx_bluesky.hyperion.external_interaction.ispyb.ispyb_utils import ( + get_current_time_string, +) + +TIME_FORMAT_REGEX = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}" + + +def test_get_current_time_string(): + current_time = get_current_time_string() + + assert isinstance(current_time, str) + assert re.match(TIME_FORMAT_REGEX, current_time) is not None + + +@pytest.mark.parametrize( + "visit_path, expected_match", + [ + ("/dls/i03/data/2022/cm6477-45/", "cm6477-45"), + ("/dls/i03/data/2022/cm6477-45", "cm6477-45"), + ("/dls/i03/data/2022/mx54663-1/", "mx54663-1"), + ("/dls/i03/data/2022/mx54663-1", "mx54663-1"), + ("/dls/i03/data/2022/mx53-1/", None), + ("/dls/i03/data/2022/mx53-1", None), + ("/dls/i03/data/2022/mx5563-1565/", None), + ("/dls/i03/data/2022/mx5563-1565", None), + ], +) +def test_find_visit_in_visit_path(visit_path: str, expected_match: str): + test_visit_path = get_visit_string_from_path(visit_path) + assert test_visit_path == expected_match + + +@pytest.mark.parametrize( + "visit_string, expected_proposal, expected_session", + [ + ("cm6477-45", "cm6477", 45), + ("mx54663-1", "mx54663", 1), + ("ea54663985-13651", "ea54663985", 13651), + ], +) +def test_proposal_and_session_from_visit_string_happy_path( + visit_string: str, expected_proposal: str, expected_session: int +): + proposal, session = get_proposal_and_session_from_visit_string(visit_string) + assert proposal == expected_proposal + assert session == expected_session + + +@pytest.mark.parametrize( + "visit_string, exception_type", + [ + ("cm647-7-45", AssertionError), + ("mx54663.1", AssertionError), + ("mx54663-pop", ValueError), + ], +) +def test_given_invalid_visit_string_get_proposal_and_session_throws( + visit_string: str, exception_type +): + with pytest.raises(exception_type): + get_proposal_and_session_from_visit_string(visit_string) diff --git 
a/tests/unit_tests/hyperion/external_interaction/test_write_rotation_nexus.py b/tests/unit_tests/hyperion/external_interaction/test_write_rotation_nexus.py new file mode 100644 index 000000000..7b6717735 --- /dev/null +++ b/tests/unit_tests/hyperion/external_interaction/test_write_rotation_nexus.py @@ -0,0 +1,484 @@ +import os +from pathlib import Path +from shutil import copy +from unittest.mock import patch + +import bluesky.preprocessors as bpp +import h5py +import numpy as np +import pytest +from bluesky.run_engine import RunEngine +from h5py import Dataset, ExternalLink, Group + +from mx_bluesky.hyperion.device_setup_plans.read_hardware_for_setup import ( + read_hardware_during_collection, +) +from mx_bluesky.hyperion.experiment_plans.rotation_scan_plan import ( + RotationScanComposite, +) +from mx_bluesky.hyperion.external_interaction.callbacks.rotation.nexus_callback import ( + RotationNexusFileCallback, +) +from mx_bluesky.hyperion.external_interaction.nexus.write_nexus import NexusWriter +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.constants import CONST +from mx_bluesky.hyperion.parameters.rotation import RotationScan + +from ....conftest import extract_metafile, raw_params_from_file + +TEST_EXAMPLE_NEXUS_FILE = Path("ins_8_5.nxs") +TEST_EXAMPLE_NEXUS_METAFILE_PREFIX = "ins_8_5_meta" +TEST_DATA_DIRECTORY = Path("tests/test_data/nexus_files/rotation") +TEST_FILENAME = "rotation_scan_test_nexus" + + +@pytest.fixture +def test_params(tmpdir): + param_dict = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + param_dict["storage_directory"] = "tests/test_data" + param_dict["file_name"] = f"{tmpdir}/{TEST_FILENAME}" + param_dict["scan_width_deg"] = 360.0 + param_dict["demand_energy_ev"] = 12700 + params = RotationScan(**param_dict) + params.x_start_um = 0 + params.y_start_um = 0 + params.z_start_um = 0 + params.exposure_time_s = 0.004 + return params + + +def fake_rotation_scan( + parameters: RotationScan, + subscription: RotationNexusFileCallback, + rotation_devices: RotationScanComposite, +): + @bpp.subs_decorator(subscription) + @bpp.set_run_key_decorator("rotation_scan_with_cleanup_and_subs") + @bpp.run_decorator( # attach experiment metadata to the start document + md={ + "subplan_name": CONST.PLAN.ROTATION_OUTER, + "hyperion_parameters": parameters.json(), + "activate_callbacks": "RotationNexusFileCallback", + } + ) + def plan(): + yield from read_hardware_during_collection( + rotation_devices.aperture_scatterguard, + rotation_devices.attenuator, + rotation_devices.flux, + rotation_devices.dcm, + rotation_devices.eiger, + ) + + return plan() + + +def dectris_device_mapping(meta_filename: str): + return { + "entry": { + "instrument": { + "detector": { + "bit_depth_image": f"{meta_filename}//_dectris/bit_depth_image", + "bit_depth_readout": f"{meta_filename}//_dectris/bit_depth_image", + "detectorSpecific": { + "ntrigger": f"{meta_filename}///_dectris/ntrigger", + "software_version": f"{meta_filename}//_dectris/software_version", + }, + "detector_readout_time": f"{meta_filename}//_dectris/detector_readout_time", + "flatfield_applied": f"{meta_filename}//_dectris/flatfield_correction_applied", + "photon_energy": f"{meta_filename}//_dectris/photon_energy", + "pixel_mask": f"{meta_filename}//mask", + "pixel_mask_applied": f"{meta_filename}//_dectris/pixel_mask_applied", + "threshold_energy": f"{meta_filename}//_dectris/threshold_energy", + } + } + } + } + + +def 
apply_metafile_mapping(exceptions: dict, mapping: dict): + """Recursively populate the exceptions map with corresponding mapping entries""" + for key in mapping.keys(): + mapping_value = mapping.get(key) + if isinstance(mapping_value, dict): + exceptions_child = exceptions.setdefault(key, {}) + apply_metafile_mapping(exceptions_child, mapping_value) + else: + exceptions[key] = mapping_value + + +def test_rotation_scan_nexus_output_compared_to_existing_full_compare( + test_params: RotationScan, + tmpdir, + fake_create_rotation_devices: RotationScanComposite, +): + test_params.chi_start_deg = 0 + test_params.phi_start_deg = 0 + run_number = test_params.detector_params.run_number + nexus_filename = f"{tmpdir}/{TEST_FILENAME}_{run_number}.nxs" + master_filename = f"{tmpdir}/{TEST_FILENAME}_{run_number}_master.h5" + meta_filename = f"{TEST_FILENAME}_{run_number}_meta.h5" + + fake_create_rotation_devices.eiger.bit_depth.sim_put(32) # type: ignore + + RE = RunEngine({}) + + with patch( + "mx_bluesky.hyperion.external_interaction.nexus.write_nexus.get_start_and_predicted_end_time", + return_value=("test_time", "test_time"), + ): + RE( + fake_rotation_scan( + test_params, RotationNexusFileCallback(), fake_create_rotation_devices + ) + ) + + assert os.path.isfile(nexus_filename) + assert os.path.isfile(master_filename) + + example_metafile_path = ( + f"{TEST_DATA_DIRECTORY}/{TEST_EXAMPLE_NEXUS_METAFILE_PREFIX}.h5.gz" + ) + extract_metafile( + example_metafile_path, f"{tmpdir}/{TEST_EXAMPLE_NEXUS_METAFILE_PREFIX}.h5" + ) + example_nexus_path = f"{tmpdir}/{TEST_EXAMPLE_NEXUS_FILE}" + copy(TEST_DATA_DIRECTORY / TEST_EXAMPLE_NEXUS_FILE, example_nexus_path) + + # Models expected differences to the GDA master nexus file + # If a key is in _missing then it is not expected to be present + # If a key is in _ignore then we do not compare it + # If a key maps to a dict then we expect it to be a Group + # Otherwise if a key is present we expect it to be a DataSet + # If a key maps to a callable then we use that as the comparison function + # Otherwise we compare the scalar or array value as appropriate + exceptions = { + "entry": { + "_missing": {"end_time"}, + "data": {"_ignore": {"data", "omega"}}, + "instrument": { + "_missing": {"transformations", "detector_z", "source"}, + "detector": { + "_missing": { + "detector_distance", + "serial_number", # nexgen#236 + }, + "distance": 0.1, + "transformations": { + "detector_z": {"det_z": np.array([100])}, + "det_z": np.array([100]), + }, + "detector_z": {"det_z": np.array([100])}, + "underload_value": 0, + "_ignore": { + "beam_center_x", + "beam_center_y", + "depends_on", + "saturation_value", + "sensor_material", + }, + "detectorSpecific": { + "_missing": {"pixel_mask"}, + "x_pixels": 4148, + "y_pixels": 4362, + }, + "module": {"_ignore": {"module_offset"}}, + "sensor_thickness": np.isclose, + }, + "attenuator": {"attenuator_transmission": np.isclose}, + "beam": {"incident_wavelength": np.isclose}, + "name": b"DIAMOND BEAMLINE S03", + }, + "sample": { + "beam": {"incident_wavelength": np.isclose}, + "transformations": { + "_missing": {"omega_end"}, + "_ignore": {"omega"}, + "omega_increment_set": 0.1, + "omega_end": lambda a, b: np.all(np.isclose(a, b, atol=1e-03)), + }, + "sample_omega": { + "_ignore": {"omega_end", "omega"}, + "omega_increment_set": 0.1, + }, + "sample_x": {"sam_x": np.isclose}, + "sample_y": {"sam_y": np.isclose}, + "sample_z": {"sam_z": np.isclose}, + "sample_chi": {"chi": np.isclose}, + "sample_phi": {"phi": np.isclose}, + }, + 
"end_time_estimated": b"test_timeZ", + "start_time": b"test_timeZ", + "source": { + "name": b"Diamond Light Source", + "type": b"Synchrotron X-ray Source", + }, + } + } + + with ( + h5py.File(example_nexus_path, "r") as example_nexus, + h5py.File(nexus_filename, "r") as hyperion_nexus, + ): + apply_metafile_mapping(exceptions, dectris_device_mapping(meta_filename)) + _compare_actual_and_expected_nexus_output( + hyperion_nexus, example_nexus, exceptions + ) + + +def test_rotation_scan_nexus_output_compared_to_existing_file( + test_params: RotationScan, + tmpdir, + fake_create_rotation_devices: RotationScanComposite, +): + run_number = test_params.run_number or test_params.detector_params.run_number + nexus_filename = f"{tmpdir}/{TEST_FILENAME}_{run_number}.nxs" + master_filename = f"{tmpdir}/{TEST_FILENAME}_{run_number}_master.h5" + + fake_create_rotation_devices.eiger.bit_depth.sim_put(32) # type: ignore + + RE = RunEngine({}) + + with patch( + "mx_bluesky.hyperion.external_interaction.nexus.write_nexus.get_start_and_predicted_end_time", + return_value=("test_time", "test_time"), + ): + RE( + fake_rotation_scan( + test_params, RotationNexusFileCallback(), fake_create_rotation_devices + ) + ) + + assert os.path.isfile(nexus_filename) + assert os.path.isfile(master_filename) + + with ( + h5py.File( + str(TEST_DATA_DIRECTORY / TEST_EXAMPLE_NEXUS_FILE), "r" + ) as example_nexus, + h5py.File(nexus_filename, "r") as hyperion_nexus, + ): + assert hyperion_nexus["/entry/start_time"][()] == b"test_timeZ" # type: ignore + assert hyperion_nexus["/entry/end_time_estimated"][()] == b"test_timeZ" # type: ignore + + # we used to write the positions wrong... + hyperion_omega: np.ndarray = np.array( + hyperion_nexus["/entry/data/omega"][:] # type: ignore + ) + example_omega: np.ndarray = example_nexus["/entry/data/omega"][:] # type: ignore + assert np.allclose(hyperion_omega, example_omega) + + assert isinstance( + hyperion_data := hyperion_nexus["/entry/data/data"], h5py.Dataset + ) + example_data_shape = example_nexus["/entry/data/data"].shape # type: ignore + + assert hyperion_data.dtype == "uint32" + assert hyperion_data.shape == example_data_shape + + hyperion_instrument = hyperion_nexus["/entry/instrument"] + example_instrument = example_nexus["/entry/instrument"] + transmission = "attenuator/attenuator_transmission" + wavelength = "beam/incident_wavelength" + assert np.isclose( + hyperion_instrument[transmission][()], # type: ignore + example_instrument[transmission][()], # type: ignore + ) + assert np.isclose( + hyperion_instrument[wavelength][()], # type: ignore + example_instrument[wavelength][()], # type: ignore + ) + + hyperion_sam_x = hyperion_nexus["/entry/sample/sample_x/sam_x"] + example_sam_x = example_nexus["/entry/sample/sample_x/sam_x"] + hyperion_sam_y = hyperion_nexus["/entry/sample/sample_y/sam_y"] + example_sam_y = example_nexus["/entry/sample/sample_y/sam_y"] + hyperion_sam_z = hyperion_nexus["/entry/sample/sample_z/sam_z"] + example_sam_z = example_nexus["/entry/sample/sample_z/sam_z"] + + hyperion_sam_phi = hyperion_nexus["/entry/sample/sample_phi/phi"] + example_sam_phi = example_nexus["/entry/sample/sample_phi/phi"] + hyperion_sam_chi = hyperion_nexus["/entry/sample/sample_chi/chi"] + example_sam_chi = example_nexus["/entry/sample/sample_chi/chi"] + + hyperion_sam_omega = hyperion_nexus["/entry/sample/sample_omega/omega"] + example_sam_omega = example_nexus["/entry/sample/sample_omega/omega"] + + assert np.isclose( + hyperion_sam_x[()], # type: ignore + example_sam_x[()], # 
type: ignore + ) + assert np.isclose( + hyperion_sam_y[()], # type: ignore + example_sam_y[()], # type: ignore + ) + assert np.isclose( + hyperion_sam_z[()], # type: ignore + example_sam_z[()], # type: ignore + ) + + assert hyperion_sam_x.attrs.get("depends_on") == example_sam_x.attrs.get( + "depends_on" + ) + assert hyperion_sam_y.attrs.get("depends_on") == example_sam_y.attrs.get( + "depends_on" + ) + assert hyperion_sam_z.attrs.get("depends_on") == example_sam_z.attrs.get( + "depends_on" + ) + assert hyperion_sam_phi.attrs.get("depends_on") == example_sam_phi.attrs.get( + "depends_on" + ) + assert hyperion_sam_chi.attrs.get("depends_on") == example_sam_chi.attrs.get( + "depends_on" + ) + assert hyperion_sam_omega.attrs.get( + "depends_on" + ) == example_sam_omega.attrs.get("depends_on") + + +@pytest.mark.parametrize( + "bit_depth,expected_type", + [(8, np.uint8), (16, np.uint16), (32, np.uint32), (100, np.uint16)], +) +@patch("mx_bluesky.hyperion.external_interaction.nexus.write_nexus.NXmxFileWriter") +def test_given_detector_bit_depth_changes_then_vds_datatype_as_expected( + mock_nexus_writer, + test_params: RotationScan, + fake_create_rotation_devices: RotationScanComposite, + bit_depth, + expected_type, +): + write_vds_mock = mock_nexus_writer.return_value.write_vds + + fake_create_rotation_devices.eiger.bit_depth.sim_put(bit_depth) # type: ignore + + RE = RunEngine({}) + + with patch( + "mx_bluesky.hyperion.external_interaction.nexus.write_nexus.get_start_and_predicted_end_time", + return_value=("test_time", "test_time"), + ): + RE( + fake_rotation_scan( + test_params, RotationNexusFileCallback(), fake_create_rotation_devices + ) + ) + + for call in write_vds_mock.mock_calls: + assert call.kwargs["vds_dtype"] == expected_type + + +def _compare_actual_and_expected_nexus_output(actual, expected, exceptions: dict): + _compare_actual_and_expected([], actual, expected, exceptions) + + +def _compare_actual_and_expected(path: list[str], actual, expected, exceptions: dict): + if expected is None: + # The nexus file under test contains a node that isn't in the original GDA reference nexus file + # but we may still expect something if the exception map contains it + expected = {} + + path_str = "/".join(path) + LOGGER.debug(f"Comparing {path_str}") + keys_not_in_actual = ( + expected.keys() - actual.keys() - exceptions.get("_missing", set()) + ) + assert ( + len(keys_not_in_actual) == 0 + ), f"Missing entries in group {path_str}, {keys_not_in_actual}" + + keys_to_compare = actual.keys() + keys_to_ignore = exceptions.get("_ignore", set()) + keys_not_in_expected = ( + keys_to_compare + - expected.keys() + - {k for k in exceptions.keys() if not k.startswith("_")} + - keys_to_ignore + ) + cmp = len(keys_not_in_expected) == 0 + keys_to_compare = sorted(keys_to_compare) + assert cmp, f"Found unexpected entries in group {path_str}, {keys_not_in_expected}" + for key in keys_to_compare: + item_path = path + [key] + actual_link = actual.get(key, getlink=True) + item_path_str = "/" + "/".join(item_path) + exception = exceptions.get(key, None) + if isinstance(actual_link, ExternalLink): + if exception: + actual_link_path = f"{actual_link.filename}//{actual_link.path}" + assert ( + actual_link_path == exception + ), f"Actual and expected external links differ {actual_link_path}, {exception}" + else: + LOGGER.debug( + f"Skipping external link {item_path_str} -> {actual_link.path}" + ) + continue + actual_class = actual.get(key, getclass=True, getlink=False) + expected_class = ( + Group + if 
isinstance(exception, dict) + else ( + Dataset if exception is not None else expected.get(key, getclass=True) # type: ignore + ) + ) + actual_value = actual.get(key) + expected_value = ( + expected.get(key) + if (exception is None or isinstance(exception, dict) or callable(exception)) + else exception + ) + if expected_class == Group: + _compare_actual_and_expected( + item_path, actual_value, expected.get(key), exceptions.get(key, {}) + ) + elif (expected_class == Dataset) and key not in keys_to_ignore: + if isinstance(expected_value, Dataset): + # Only check shape if we didn't override the expected value + assert ( + actual_value.shape == expected_value.shape + ), f"Actual and expected shapes differ for {item_path_str}: {actual_value.shape}, {expected_value.shape}" + else: + assert hasattr(actual_value, "shape"), f"No shape for {item_path_str}" + expected_shape = np.shape(expected_value) # type: ignore + assert ( + actual_value.shape == expected_shape + ), f"{item_path_str} data shape not expected shape{actual_value.shape}, {expected_shape}" + if actual_value.shape == (): + if callable(exception): + assert exceptions.get(key)(actual_value, expected_value) # type: ignore + elif np.isscalar(exception): + assert ( + actual_value[()] == exception + ), f"{item_path_str} actual and expected did not match {actual_value[()]}, {exception}." + else: + assert ( + actual_class == expected_class + ), f"{item_path_str} Actual and expected class don't match {actual_class}, {expected_class}" + assert ( + actual_value[()] == expected_value[()] # type: ignore + ), f"Actual and expected values differ for {item_path_str}: {actual_value[()]} != {expected_value[()]}" # type: ignore + else: + actual_value_str = np.array2string(actual_value, threshold=10) + expected_value_str = np.array2string(expected_value, threshold=10) # type: ignore + if callable(exception): + assert exception( + actual_value, expected_value + ), f"Actual and expected values differ for {item_path_str}: {actual_value_str} != {expected_value_str}, according to {exception}" + else: + assert np.array_equal( + actual_value, + expected_value, # type: ignore + ), f"Actual and expected values differ for {item_path_str}: {actual_value_str} != {expected_value_str}" + + +def test_override_parameters_override(test_params: RotationScan): + writer = NexusWriter( + test_params, (1, 2, 3), {}, full_num_of_images=82367, meta_data_run_number=9852 + ) + assert writer.full_num_of_images != test_params.num_images + assert writer.full_num_of_images == 82367 + assert writer.data_filename == f"{test_params.file_name}_9852" diff --git a/tests/unit_tests/hyperion/parameters/__init__.py b/tests/unit_tests/hyperion/parameters/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/parameters/test_parameter_model.py b/tests/unit_tests/hyperion/parameters/test_parameter_model.py new file mode 100644 index 000000000..abf7d5d9a --- /dev/null +++ b/tests/unit_tests/hyperion/parameters/test_parameter_model.py @@ -0,0 +1,112 @@ +import json +from pathlib import Path + +import pytest +from pydantic import ValidationError + +from mx_bluesky.hyperion.parameters.gridscan import ( + OddYStepsException, + RobotLoadThenCentre, + ThreeDGridScan, +) +from mx_bluesky.hyperion.parameters.rotation import RotationScan + +from ....conftest import raw_params_from_file + + +@pytest.fixture +def minimal_3d_gridscan_params(): + return { + "sample_id": 123, + "x_start_um": 0.123, + "y_start_um": 0.777, + "z_start_um": 0.05, + 
"parameter_model_version": "5.0.0", + "visit": "cm12345", + "file_name": "test_file_name", + "y2_start_um": 2, + "z2_start_um": 2, + "x_steps": 5, + "y_steps": 7, + "z_steps": 9, + "storage_directory": "/tmp/dls/i03/data/2024/cm31105-4/xraycentring/123456/", + } + + +def test_minimal_3d_gridscan_params(minimal_3d_gridscan_params): + test_params = ThreeDGridScan(**minimal_3d_gridscan_params) + assert {"sam_x", "sam_y", "sam_z"} == set(test_params.scan_points.keys()) + assert test_params.scan_indices == [0, 35] + assert test_params.num_images == (5 * 7 + 5 * 9) + assert test_params.exposure_time_s == 0.02 + + +def test_cant_do_panda_fgs_with_odd_y_steps(minimal_3d_gridscan_params): + test_params = ThreeDGridScan(**minimal_3d_gridscan_params) + with pytest.raises(OddYStepsException): + _ = test_params.panda_FGS_params + assert test_params.FGS_params + + +def test_serialise_deserialise(minimal_3d_gridscan_params): + test_params = ThreeDGridScan(**minimal_3d_gridscan_params) + serialised = json.loads(test_params.json()) + deserialised = ThreeDGridScan(**serialised) + assert deserialised.demand_energy_ev is None + assert deserialised.visit == "cm12345" + assert deserialised.x_start_um == 0.123 + + +def test_param_version(minimal_3d_gridscan_params): + with pytest.raises(ValidationError): + minimal_3d_gridscan_params["parameter_model_version"] = "4.3.0" + _ = ThreeDGridScan(**minimal_3d_gridscan_params) + minimal_3d_gridscan_params["parameter_model_version"] = "5.0.0" + _ = ThreeDGridScan(**minimal_3d_gridscan_params) + minimal_3d_gridscan_params["parameter_model_version"] = "5.3.0" + _ = ThreeDGridScan(**minimal_3d_gridscan_params) + minimal_3d_gridscan_params["parameter_model_version"] = "5.3.7" + _ = ThreeDGridScan(**minimal_3d_gridscan_params) + with pytest.raises(ValidationError): + minimal_3d_gridscan_params["parameter_model_version"] = "6.3.7" + _ = ThreeDGridScan(**minimal_3d_gridscan_params) + + +def test_robot_load_then_centre_params(): + params = { + "parameter_model_version": "5.0.0", + "sample_id": 123456, + "visit": "cm12345", + "file_name": "file_name", + "storage_directory": "/tmp/dls/i03/data/2024/cm31105-4/xraycentring/123456/", + } + params["detector_distance_mm"] = 200 + test_params = RobotLoadThenCentre(**params) + assert test_params.visit_directory + assert test_params.detector_params + + +def test_default_snapshot_path(minimal_3d_gridscan_params): + gridscan_params = ThreeDGridScan(**minimal_3d_gridscan_params) + assert gridscan_params.snapshot_directory == Path( + "/tmp/dls/i03/data/2024/cm31105-4/xraycentring/123456/snapshots" + ) + + params_with_snapshot_path = dict(minimal_3d_gridscan_params) + params_with_snapshot_path["snapshot_directory"] = "/tmp/my_snapshots" + + gridscan_params_with_snapshot_path = ThreeDGridScan(**params_with_snapshot_path) + assert gridscan_params_with_snapshot_path.snapshot_directory == Path( + "/tmp/my_snapshots" + ) + + +def test_osc_is_used(): + raw_params = raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_rotation_scan_parameters.json" + ) + for osc in [0.001, 0.05, 0.1, 0.2, 0.75, 1, 1.43]: + raw_params["rotation_increment_deg"] = osc + params = RotationScan(**raw_params) + assert params.rotation_increment_deg == osc + assert params.num_images == int(params.scan_width_deg / osc) diff --git a/tests/unit_tests/hyperion/test_exceptions.py b/tests/unit_tests/hyperion/test_exceptions.py new file mode 100644 index 000000000..56d513c14 --- /dev/null +++ b/tests/unit_tests/hyperion/test_exceptions.py @@ -0,0 +1,23 @@ +import 
pytest +from bluesky.plan_stubs import null + +from mx_bluesky.hyperion.exceptions import WarningException, catch_exception_and_warn + + +class _TestException(Exception): + pass + + +def dummy_plan(): + yield from null() + raise _TestException + + +def test_catch_exception_and_warn_correctly_raises_warning_exception(RE): + with pytest.raises(WarningException): + RE(catch_exception_and_warn(_TestException, dummy_plan)) + + +def test_catch_exception_and_warn_correctly_raises_original_exception(RE): + with pytest.raises(_TestException): + RE(catch_exception_and_warn(ValueError, dummy_plan)) diff --git a/tests/unit_tests/hyperion/test_log/__init__.py b/tests/unit_tests/hyperion/test_log/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/test_log/conftest.py b/tests/unit_tests/hyperion/test_log/conftest.py new file mode 100644 index 000000000..09a7add66 --- /dev/null +++ b/tests/unit_tests/hyperion/test_log/conftest.py @@ -0,0 +1,13 @@ +from dodal.log import LOGGER + +from mx_bluesky.hyperion.log import ALL_LOGGERS + +from ....conftest import _reset_loggers + + +def pytest_runtest_setup(): + _reset_loggers([*ALL_LOGGERS, LOGGER]) + + +def pytest_runtest_teardown(): + _reset_loggers([*ALL_LOGGERS, LOGGER]) diff --git a/tests/unit_tests/hyperion/test_log/test_log.py b/tests/unit_tests/hyperion/test_log/test_log.py new file mode 100644 index 000000000..62c708177 --- /dev/null +++ b/tests/unit_tests/hyperion/test_log/test_log.py @@ -0,0 +1,197 @@ +import os +from logging import FileHandler +from logging.handlers import TimedRotatingFileHandler +from unittest.mock import MagicMock, patch + +import pytest +from bluesky import plan_stubs as bps +from bluesky import preprocessors as bpp +from dodal.log import LOGGER as dodal_logger +from dodal.log import set_up_all_logging_handlers + +from mx_bluesky.hyperion import log +from mx_bluesky.hyperion.external_interaction.callbacks.log_uid_tag_callback import ( + LogUidTaggingCallback, +) + +from ....conftest import clear_log_handlers + + +@pytest.fixture(scope="function") +def clear_and_mock_loggers(): + clear_log_handlers([*log.ALL_LOGGERS, dodal_logger]) + mock_open_with_tell = MagicMock() + mock_open_with_tell.tell.return_value = 0 + with ( + patch("dodal.log.logging.FileHandler._open", mock_open_with_tell), + patch("dodal.log.GELFTCPHandler.emit") as graylog_emit, + patch("dodal.log.TimedRotatingFileHandler.emit") as filehandler_emit, + ): + graylog_emit.reset_mock() + filehandler_emit.reset_mock() + yield filehandler_emit, graylog_emit + clear_log_handlers([*log.ALL_LOGGERS, dodal_logger]) + + +@pytest.mark.skip_log_setup +def test_no_env_variable_sets_correct_file_handler( + clear_and_mock_loggers, +) -> None: + log.do_default_logging_setup(dev_mode=True) + file_handlers: FileHandler = next( + filter(lambda h: isinstance(h, FileHandler), dodal_logger.handlers) # type: ignore + ) + + assert file_handlers.baseFilename.endswith("/tmp/dev/hyperion.log") + + +@pytest.mark.skip_log_setup +@patch("dodal.log.Path.mkdir", autospec=True) +@patch.dict( + os.environ, {"HYPERION_LOG_DIR": "./dls_sw/s03/logs/bluesky"} +) # Note we use a relative path here so it works in CI +def test_set_env_variable_sets_correct_file_handler( + mock_dir, + clear_and_mock_loggers, +) -> None: + log.do_default_logging_setup(dev_mode=True) + + file_handlers: FileHandler = next( + filter(lambda h: isinstance(h, FileHandler), dodal_logger.handlers) # type: ignore + ) + + assert 
file_handlers.baseFilename.endswith("/dls_sw/s03/logs/bluesky/hyperion.log") + + +@pytest.mark.skip_log_setup +def test_messages_logged_from_dodal_and_hyperion_contain_dcgid( + clear_and_mock_loggers, +): + _, mock_GELFTCPHandler_emit = clear_and_mock_loggers + log.do_default_logging_setup(dev_mode=True) + + log.set_dcgid_tag(100) + + logger = log.LOGGER + logger.info("test_hyperion") + dodal_logger.info("test_dodal") + + graylog_calls = mock_GELFTCPHandler_emit.mock_calls[1:] + + dc_group_id_correct = [c.args[0].dc_group_id == 100 for c in graylog_calls] + assert all(dc_group_id_correct) + + +@pytest.mark.skip_log_setup +def test_messages_are_tagged_with_run_uid(clear_and_mock_loggers, RE): + _, mock_GELFTCPHandler_emit = clear_and_mock_loggers + log.do_default_logging_setup(dev_mode=True) + + RE.subscribe(LogUidTaggingCallback()) + test_run_uid = None + logger = log.LOGGER + + @bpp.run_decorator() + def test_plan(): + yield from bps.sleep(0) + assert log.tag_filter.run_uid is not None + nonlocal test_run_uid + test_run_uid = log.tag_filter.run_uid + logger.info("test_hyperion") + logger.info("test_hyperion") + yield from bps.sleep(0) + + assert log.tag_filter.run_uid is None + RE(test_plan()) + assert log.tag_filter.run_uid is None + + graylog_calls_in_plan = [ + c.args[0] + for c in mock_GELFTCPHandler_emit.mock_calls + if c.args[0].msg == "test_hyperion" + ] + + assert len(graylog_calls_in_plan) == 2 + + dc_group_id_correct = [ + record.run_uid == test_run_uid for record in graylog_calls_in_plan + ] + assert all(dc_group_id_correct) + + +@pytest.mark.skip_log_setup +def test_messages_logged_from_dodal_and_hyperion_get_sent_to_graylog_and_file( + clear_and_mock_loggers, +): + mock_filehandler_emit, mock_GELFTCPHandler_emit = clear_and_mock_loggers + log.do_default_logging_setup(dev_mode=True) + logger = log.LOGGER + logger.info("test_hyperion") + dodal_logger.info("test_dodal") + + filehandler_calls = mock_filehandler_emit.mock_calls + graylog_calls = mock_GELFTCPHandler_emit.mock_calls + + assert len(filehandler_calls) >= 2 + assert len(graylog_calls) >= 2 + + for handler in [filehandler_calls, graylog_calls]: + handler_names = [c.args[0].name for c in handler] + handler_messages = [c.args[0].message for c in handler] + assert "Hyperion" in handler_names + assert "Dodal" in handler_names + assert "test_hyperion" in handler_messages + assert "test_dodal" in handler_messages + + +@pytest.mark.skip_log_setup +def test_callback_loggers_log_to_own_files( + clear_and_mock_loggers, +): + mock_filehandler_emit, mock_GELFTCPHandler_emit = clear_and_mock_loggers + log.do_default_logging_setup(dev_mode=True) + + hyperion_logger = log.LOGGER + ispyb_logger = log.ISPYB_LOGGER + nexus_logger = log.NEXUS_LOGGER + for logger in [ispyb_logger, nexus_logger]: + set_up_all_logging_handlers( + logger, log._get_logging_dir(), logger.name, True, 10000 + ) + + hyperion_logger.info("test_hyperion") + ispyb_logger.info("test_ispyb") + nexus_logger.info("test_nexus") + + total_filehandler_calls = mock_filehandler_emit.mock_calls + total_graylog_calls = mock_GELFTCPHandler_emit.mock_calls + + assert len(total_filehandler_calls) == len(total_graylog_calls) + + hyperion_filehandler = next( + filter(lambda h: isinstance(h, TimedRotatingFileHandler), dodal_logger.handlers) # type: ignore + ) + ispyb_filehandler = next( + filter(lambda h: isinstance(h, TimedRotatingFileHandler), ispyb_logger.handlers) # type: ignore + ) + nexus_filehandler = next( + filter(lambda h: isinstance(h, TimedRotatingFileHandler), 
nexus_logger.handlers) # type: ignore + ) + assert nexus_filehandler.baseFilename != hyperion_filehandler.baseFilename # type: ignore + assert ispyb_filehandler.baseFilename != hyperion_filehandler.baseFilename # type: ignore + assert ispyb_filehandler.baseFilename != nexus_filehandler.baseFilename # type: ignore + + +@pytest.mark.skip_log_setup +def test_log_writes_debug_file_on_error(clear_and_mock_loggers): + mock_filehandler_emit, _ = clear_and_mock_loggers + log.do_default_logging_setup(dev_mode=True) + log.LOGGER.debug("debug_message_1") + log.LOGGER.debug("debug_message_2") + mock_filehandler_emit.assert_not_called() + log.LOGGER.error("error happens") + assert len(mock_filehandler_emit.mock_calls) == 4 + messages = [call.args[0].message for call in mock_filehandler_emit.mock_calls] + assert "debug_message_1" in messages + assert "debug_message_2" in messages + assert "error happens" in messages diff --git a/tests/unit_tests/hyperion/test_main_system.py b/tests/unit_tests/hyperion/test_main_system.py new file mode 100644 index 000000000..5885afc99 --- /dev/null +++ b/tests/unit_tests/hyperion/test_main_system.py @@ -0,0 +1,563 @@ +from __future__ import annotations + +import functools +import json +import os +import threading +from collections.abc import Callable +from dataclasses import dataclass +from queue import Queue +from sys import argv +from time import sleep +from typing import Any +from unittest.mock import MagicMock, patch + +import flask +import pytest +from blueapi.core import BlueskyContext +from dodal.devices.attenuator import Attenuator +from dodal.devices.zebra import Zebra +from flask.testing import FlaskClient + +from mx_bluesky.hyperion.__main__ import ( + Actions, + BlueskyRunner, + Status, + create_app, + create_targets, + setup_context, +) +from mx_bluesky.hyperion.exceptions import WarningException +from mx_bluesky.hyperion.experiment_plans.experiment_registry import PLAN_REGISTRY +from mx_bluesky.hyperion.log import LOGGER +from mx_bluesky.hyperion.parameters.cli import parse_cli_args +from mx_bluesky.hyperion.parameters.gridscan import ThreeDGridScan +from mx_bluesky.hyperion.utils.context import device_composite_from_context + +from ...conftest import raw_params_from_file + +FGS_ENDPOINT = "/flyscan_xray_centre/" +START_ENDPOINT = FGS_ENDPOINT + Actions.START.value +STOP_ENDPOINT = Actions.STOP.value +STATUS_ENDPOINT = Actions.STATUS.value +SHUTDOWN_ENDPOINT = Actions.SHUTDOWN.value +TEST_BAD_PARAM_ENDPOINT = "/fgs_real_params/" + Actions.START.value +TEST_PARAMS = json.dumps( + raw_params_from_file( + "tests/test_data/parameter_json_files/good_test_parameters.json" + ) +) + +SECS_PER_RUNENGINE_LOOP = 0.1 +RUNENGINE_TAKES_TIME_TIMEOUT = 15 + +""" +Every test in this file which uses the test_env fixture should either: + - set RE_takes_time to false + or + - set an error on the mock run engine +In order to avoid threads which get left alive forever after test completion +""" + + +autospec_patch = functools.partial(patch, autospec=True, spec_set=True) + + +class MockRunEngine: + def __init__(self, test_name): + self.RE_takes_time = True + self.aborting_takes_time = False + self.error: Exception | None = None + self.test_name = test_name + + def __call__(self, *args: Any, **kwds: Any) -> Any: + time = 0.0 + while self.RE_takes_time: + sleep(SECS_PER_RUNENGINE_LOOP) + time += SECS_PER_RUNENGINE_LOOP + if self.error: + raise self.error + if time > RUNENGINE_TAKES_TIME_TIMEOUT: + raise TimeoutError( + f'Mock RunEngine thread for test "{self.test_name}" spun too 
long' + "without an error. Most likely you should initialise with " + "RE_takes_time=false, or set RE.error from another thread." + ) + if self.error: + raise self.error + + def abort(self): + while self.aborting_takes_time: + sleep(SECS_PER_RUNENGINE_LOOP) + if self.error: + raise self.error + self.RE_takes_time = False + + def subscribe(self, *args): + pass + + def unsubscribe(self, *args): + pass + + +@dataclass +class ClientAndRunEngine: + client: FlaskClient + mock_run_engine: MockRunEngine + + +def mock_dict_values(d: dict): + return {k: MagicMock() if k == "setup" or k == "run" else v for k, v in d.items()} + + +TEST_EXPTS = { + "test_experiment": { + "setup": MagicMock(), + "param_type": MagicMock(), + "experiment_param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + "test_experiment_no_internal_param_type": { + "setup": MagicMock(), + "experiment_param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + "fgs_real_params": { + "setup": MagicMock(), + "param_type": ThreeDGridScan, + "experiment_param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, +} + + +@pytest.fixture +def test_env(request: pytest.FixtureRequest): + mock_run_engine = MockRunEngine(test_name=repr(request)) + mock_context = BlueskyContext() + real_plans_and_test_exps = dict( + {k: mock_dict_values(v) for k, v in PLAN_REGISTRY.items()}, # type: ignore + **TEST_EXPTS, # type: ignore + ) + mock_context.plan_functions = { # type: ignore + k: MagicMock() for k in real_plans_and_test_exps.keys() + } + + with ( + patch.dict( + "mx_bluesky.hyperion.__main__.PLAN_REGISTRY", + real_plans_and_test_exps, + ), + patch( + "mx_bluesky.hyperion.__main__.setup_context", + MagicMock(return_value=mock_context), + ), + ): + app, runner = create_app({"TESTING": True}, mock_run_engine, True) # type: ignore + + runner_thread = threading.Thread(target=runner.wait_on_queue) + runner_thread.start() + with ( + app.test_client() as client, + patch.dict( + "mx_bluesky.hyperion.__main__.PLAN_REGISTRY", + real_plans_and_test_exps, + ), + ): + yield ClientAndRunEngine(client, mock_run_engine) + + runner.shutdown() + runner_thread.join(timeout=3) + del mock_run_engine + + +def wait_for_run_engine_status( + client: FlaskClient, + status_check: Callable[[str], bool] = lambda status: status != Status.BUSY.value, + attempts=10, +): + while attempts != 0: + response = client.get(STATUS_ENDPOINT) + response_json = json.loads(response.data) + LOGGER.debug( + f"Checking client status - response: {response_json}, attempts left={attempts}" + ) + if status_check(response_json["status"]): + return response_json + else: + attempts -= 1 + sleep(0.2) + raise AssertionError("Run engine still busy") + + +def check_status_in_response(response_object, expected_result: Status): + response_json = json.loads(response_object.data) + assert response_json["status"] == expected_result.value + + +def test_start_gives_success(test_env: ClientAndRunEngine): + response = test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + check_status_in_response(response, Status.SUCCESS) + + +def test_getting_status_return_idle(test_env: ClientAndRunEngine): + test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + test_env.client.put(STOP_ENDPOINT) + response = test_env.client.get(STATUS_ENDPOINT) + check_status_in_response(response, Status.IDLE) + + +def test_getting_status_after_start_sent_returns_busy( + test_env: ClientAndRunEngine, +): + test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + response = 
test_env.client.get(STATUS_ENDPOINT) + check_status_in_response(response, Status.BUSY) + + +def test_putting_bad_plan_fails(test_env: ClientAndRunEngine): + response = test_env.client.put("/bad_plan/start", data=TEST_PARAMS).json + assert isinstance(response, dict) + assert response.get("status") == Status.FAILED.value + assert ( + response.get("message") + == "PlanNotFound(\"Experiment plan 'bad_plan' not found in registry.\")" + ) + test_env.mock_run_engine.abort() + + +def test_plan_with_no_params_fails(test_env: ClientAndRunEngine): + response = test_env.client.put( + "/test_experiment_no_internal_param_type/start", data=TEST_PARAMS + ).json + assert isinstance(response, dict) + assert response.get("status") == Status.FAILED.value + assert ( + response.get("message") + == "PlanNotFound(\"Corresponding internal param type for 'test_experiment_no_internal_param_type' not found in registry.\")" + ) + test_env.mock_run_engine.abort() + + +def test_sending_start_twice_fails(test_env: ClientAndRunEngine): + test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + response = test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + check_status_in_response(response, Status.FAILED) + + +def test_given_started_when_stopped_then_success_and_idle_status( + test_env: ClientAndRunEngine, +): + test_env.mock_run_engine.aborting_takes_time = True + test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + response = test_env.client.put(STOP_ENDPOINT) + check_status_in_response(response, Status.ABORTING) + response = test_env.client.get(STATUS_ENDPOINT) + check_status_in_response(response, Status.ABORTING) + test_env.mock_run_engine.aborting_takes_time = False + wait_for_run_engine_status( + test_env.client, lambda status: status != Status.ABORTING + ) + check_status_in_response(response, Status.ABORTING) + + +def test_given_started_when_stopped_and_started_again_then_runs( + test_env: ClientAndRunEngine, +): + test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + test_env.client.put(STOP_ENDPOINT) + test_env.mock_run_engine.RE_takes_time = True + response = test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + check_status_in_response(response, Status.SUCCESS) + response = test_env.client.get(STATUS_ENDPOINT) + check_status_in_response(response, Status.BUSY) + test_env.mock_run_engine.RE_takes_time = False + + +def test_when_started_n_returnstatus_interrupted_bc_RE_aborted_thn_error_reptd( + test_env: ClientAndRunEngine, +): + test_env.mock_run_engine.aborting_takes_time = True + test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + test_env.client.put(STOP_ENDPOINT) + test_env.mock_run_engine.error = Exception("D'Oh") + response_json = wait_for_run_engine_status( + test_env.client, lambda status: status != Status.ABORTING.value + ) + assert response_json["status"] == Status.FAILED.value + assert response_json["message"] == 'Exception("D\'Oh")' + assert response_json["exception_type"] == "Exception" + + +def test_start_with_json_file_gives_success(test_env: ClientAndRunEngine): + test_env.mock_run_engine.RE_takes_time = False + + with open( + "tests/test_data/parameter_json_files/good_test_parameters.json" + ) as test_params_file: + test_params = test_params_file.read() + response = test_env.client.put(START_ENDPOINT, data=test_params) + check_status_in_response(response, Status.SUCCESS) + + +test_argument_combinations = [ + ( + [ + "--dev", + ], + (True, False, False, False), + ), + ([], (False, False, False, False)), + ( + [ + "--dev", + "--skip-startup-connection", + "--external-callbacks", + 
"--verbose-event-logging", + ], + (True, True, True, True), + ), + ( + ["--external-callbacks"], + (False, False, False, True), + ), +] + + +@pytest.mark.parametrize(["arg_list", "parsed_arg_values"], test_argument_combinations) +def test_cli_args_parse(arg_list, parsed_arg_values): + argv[1:] = arg_list + test_args = parse_cli_args() + assert test_args.dev_mode == parsed_arg_values[0] + assert test_args.verbose_event_logging == parsed_arg_values[1] + assert test_args.skip_startup_connection == parsed_arg_values[2] + assert test_args.use_external_callbacks == parsed_arg_values[3] + + +@patch("mx_bluesky.hyperion.__main__.do_default_logging_setup") +@patch("mx_bluesky.hyperion.__main__.Publisher") +@patch("mx_bluesky.hyperion.__main__.setup_context") +@patch("dodal.log.GELFTCPHandler.emit") +@patch("dodal.log.TimedRotatingFileHandler.emit") +@pytest.mark.parametrize(["arg_list", "parsed_arg_values"], test_argument_combinations) +def test_blueskyrunner_uses_cli_args_correctly_for_callbacks( + filehandler_emit, + graylog_emit, + setup_context: MagicMock, + zmq_publisher: MagicMock, + set_up_logging_handlers: MagicMock, + arg_list, + parsed_arg_values, +): + mock_params = MagicMock() + mock_param_class = MagicMock() + mock_param_class.from_json.return_value = mock_params + callbacks_mock = MagicMock( + name="mock_callback_class", + return_value=("test_cb_1", "test_cb_2"), + ) + + TEST_REGISTRY = { + "test_experiment": { + "setup": MagicMock(), + "param_type": mock_param_class, + "callback_collection_type": callbacks_mock, + } + } + + @dataclass + class MockCommand: + action: Actions + devices: Any = None + experiment: Any = None + parameters: Any = None + callbacks: Any = None + + with ( + flask.Flask(__name__).test_request_context() as flask_context, + patch("mx_bluesky.hyperion.__main__.Command", MockCommand), + patch.dict( + "mx_bluesky.hyperion.__main__.PLAN_REGISTRY", TEST_REGISTRY, clear=True + ), + ): + flask_context.request.data = b"{}" # type: ignore + argv[1:] = arg_list + app, runner, port, dev_mode = create_targets() + runner.RE = MagicMock() + runner.command_queue = Queue() + runner_thread = threading.Thread(target=runner.wait_on_queue, daemon=True) + runner_thread.start() + assert dev_mode == parsed_arg_values[0] + + mock_context = MagicMock() + mock_context.plan_functions = {"test_experiment": MagicMock()} + runner.command_queue.put( + MockCommand( + action=Actions.START, + devices={}, + experiment="test_experiment", + parameters={}, + callbacks=callbacks_mock, + ), # type: ignore + block=True, # type: ignore + ) + runner.shutdown() + runner_thread.join() + assert (zmq_publisher.call_count == 1) == parsed_arg_values[3] + if parsed_arg_values[3]: + assert runner.RE.subscribe.call_count == 0 + else: + assert runner.RE.subscribe.call_count == 2 + + +@pytest.mark.skip( + "Wait for connection doesn't play nice with ophyd-async. See https://github.com/DiamondLightSource/hyperion/issues/1159" +) +def test_when_blueskyrunner_initiated_then_plans_are_setup_and_devices_connected(): + zebra = MagicMock(spec=Zebra) + attenuator = MagicMock(spec=Attenuator) + + context = BlueskyContext() + context.device(zebra, "zebra") + context.device(attenuator, "attenuator") + + @dataclass + class FakeComposite: + attenuator: Attenuator + zebra: Zebra + + # A fake setup for a plan that uses two devices: attenuator and zebra. 
+ def fake_create_devices(context) -> FakeComposite: + print("CREATING DEVICES") + return device_composite_from_context(context, FakeComposite) + + with patch.dict( + "mx_bluesky.hyperion.__main__.PLAN_REGISTRY", + { + "flyscan_xray_centre": { + "setup": fake_create_devices, + "run": MagicMock(), + "param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + }, + clear=True, + ): + print(PLAN_REGISTRY) + + BlueskyRunner( + RE=MagicMock(), + context=context, + skip_startup_connection=False, + ) + + zebra.wait_for_connection.assert_called() + attenuator.wait_for_connection.assert_called() + + +@patch( + "mx_bluesky.hyperion.experiment_plans.flyscan_xray_centre_plan.create_devices", + autospec=True, +) +def test_when_blueskyrunner_initiated_and_skip_flag_is_set_then_setup_called_upon_start( + mock_setup, test_fgs_params: ThreeDGridScan +): + mock_setup = MagicMock() + with patch.dict( + "mx_bluesky.hyperion.__main__.PLAN_REGISTRY", + { + "flyscan_xray_centre": { + "setup": mock_setup, + "run": MagicMock(), + "param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + }, + clear=True, + ): + runner = BlueskyRunner(MagicMock(), MagicMock(), skip_startup_connection=True) + mock_setup.assert_not_called() + runner.start(lambda: None, test_fgs_params, "flyscan_xray_centre", None) + mock_setup.assert_called_once() + runner.shutdown() + + +def test_when_blueskyrunner_initiated_and_skip_flag_is_not_set_then_all_plans_setup(): + mock_setup = MagicMock() + with patch.dict( + "mx_bluesky.hyperion.__main__.PLAN_REGISTRY", + { + "flyscan_xray_centre": { + "setup": mock_setup, + "param_type": MagicMock(), + "experiment_param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + "rotation_scan": { + "setup": mock_setup, + "param_type": MagicMock(), + "experiment_param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + "other_plan": { + "setup": mock_setup, + "param_type": MagicMock(), + "experiment_param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + "yet_another_plan": { + "setup": mock_setup, + "param_type": MagicMock(), + "experiment_param_type": MagicMock(), + "callback_collection_type": MagicMock(), + }, + }, + clear=True, + ): + BlueskyRunner(MagicMock(), MagicMock(), skip_startup_connection=False) + assert mock_setup.call_count == 4 + + +def test_log_on_invalid_json_params(test_env: ClientAndRunEngine): + test_env.mock_run_engine.RE_takes_time = False + response = test_env.client.put(TEST_BAD_PARAM_ENDPOINT, data='{"bad":1}').json + assert isinstance(response, dict) + assert response.get("status") == Status.FAILED.value + assert (message := response.get("message")) is not None + assert message.startswith( + "ValueError('Supplied parameters don\\'t match the plan for this endpoint" + ) + assert response.get("exception_type") == "ValueError" + + +@pytest.mark.skip( + reason="See https://github.com/DiamondLightSource/hyperion/issues/777" +) +def test_warn_exception_during_plan_causes_warning_in_log( + caplog: pytest.LogCaptureFixture, test_env: ClientAndRunEngine +): + test_env.client.put(START_ENDPOINT, data=TEST_PARAMS) + test_env.mock_run_engine.error = WarningException("D'Oh") + response_json = wait_for_run_engine_status(test_env.client) + assert response_json["status"] == Status.FAILED.value + assert response_json["message"] == 'WarningException("D\'Oh")' + assert response_json["exception_type"] == "WarningException" + assert caplog.records[-1].levelname == "WARNING" + + +@patch( + 
"dodal.devices.undulator_dcm.get_beamline_parameters", + return_value={"DCM_Perp_Offset_FIXED": 111}, +) +def test_when_context_created_then_contains_expected_number_of_plans( + get_beamline_parameters, +): + with patch.dict(os.environ, {"BEAMLINE": "i03"}): + context = setup_context(wait_for_connection=False) + + plan_names = context.plans.keys() + + assert "rotation_scan" in plan_names + assert "flyscan_xray_centre" in plan_names + assert "pin_tip_centre_then_xray_centre" in plan_names + assert "robot_load_then_centre" in plan_names diff --git a/tests/unit_tests/hyperion/test_utils.py b/tests/unit_tests/hyperion/test_utils.py new file mode 100644 index 000000000..7f951cbfd --- /dev/null +++ b/tests/unit_tests/hyperion/test_utils.py @@ -0,0 +1,25 @@ +import pytest + +from mx_bluesky.hyperion.utils.utils import ( + convert_angstrom_to_eV, + convert_eV_to_angstrom, +) + +test_wavelengths = [1.620709, 1.2398425, 0.9762539, 0.8265616, 0.68880138] +test_energies = [7650, 10000, 12700, 15000, 18000] + + +@pytest.mark.parametrize( + "test_wavelength, test_energy", + list(zip(test_wavelengths, test_energies, strict=False)), +) +def test_ev_to_a_converter(test_wavelength, test_energy): + assert convert_eV_to_angstrom(test_energy) == pytest.approx(test_wavelength) + + +@pytest.mark.parametrize( + "test_wavelength, test_energy", + list(zip(test_wavelengths, test_energies, strict=False)), +) +def test_a_to_ev_converter(test_wavelength, test_energy): + assert convert_angstrom_to_eV(test_wavelength) == pytest.approx(test_energy) diff --git a/tests/unit_tests/hyperion/utils/__init__.py b/tests/unit_tests/hyperion/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit_tests/hyperion/utils/test_callback_sim.py b/tests/unit_tests/hyperion/utils/test_callback_sim.py new file mode 100644 index 000000000..9be203528 --- /dev/null +++ b/tests/unit_tests/hyperion/utils/test_callback_sim.py @@ -0,0 +1,51 @@ +import pytest + +from ....conftest import DocumentCapturer + + +@pytest.fixture +def test_docs(): + return [ + ("start", {"uid": 12345, "abc": 56789, "xyz": 99999}), + ("stop", {"uid": 77777, "abc": 88888, "xyz": 99999}), + ] + + +def test_callback_sim_doc_names(test_docs): + DocumentCapturer.assert_doc(test_docs, "start") + DocumentCapturer.assert_doc(test_docs, "stop") + DocumentCapturer.assert_doc(test_docs, "restart", does_exist=False) + + +def test_callback_sim_has_fields(test_docs): + DocumentCapturer.assert_doc(test_docs, "start", has_fields=["uid"]) + DocumentCapturer.assert_doc(test_docs, "stop", has_fields=["abc", "xyz"]) + DocumentCapturer.assert_doc( + test_docs, "start", has_fields=["uid", "bbb"], does_exist=False + ) + + +def test_callback_sim_matches_fields(test_docs): + DocumentCapturer.assert_doc(test_docs, "start", matches_fields={"uid": 12345}) + DocumentCapturer.assert_doc( + test_docs, "stop", matches_fields={"abc": 88888, "xyz": 99999} + ) + DocumentCapturer.assert_doc( + test_docs, + "start", + matches_fields={"abc": 88888, "xyz": 99799}, + does_exist=False, + ) + + +def test_callback_sim_assert_switch(test_docs): + with pytest.raises(AssertionError): + DocumentCapturer.assert_doc(test_docs, "restart") + + with pytest.raises(AssertionError): + DocumentCapturer.assert_doc(test_docs, "start", has_fields=["uid", "bbb"]) + + with pytest.raises(AssertionError): + DocumentCapturer.assert_doc( + test_docs, "start", matches_fields={"abc": 88888, "xyz": 99799} + ) diff --git a/tests/unit_tests/hyperion/utils/test_context.py 
b/tests/unit_tests/hyperion/utils/test_context.py
new file mode 100644
index 000000000..acff4a8d8
--- /dev/null
+++ b/tests/unit_tests/hyperion/utils/test_context.py
@@ -0,0 +1,74 @@
+import dataclasses
+from unittest.mock import MagicMock
+
+import pytest
+from ophyd.device import Device
+
+from mx_bluesky.hyperion.utils.context import (
+    device_composite_from_context,
+    find_device_in_context,
+)
+
+
+class _DeviceType1(Device):
+    pass
+
+
+class _DeviceType2(Device):
+    pass
+
+
+def test_find_device_in_context():
+    context = MagicMock()
+    device = MagicMock(spec=Device)
+    context.find_device.return_value = device
+
+    found_device = find_device_in_context(context, "", expected_type=Device)  # type: ignore
+    assert found_device == device
+
+
+def test_find_device_in_context_with_wrong_type_raises_error():
+    context = MagicMock()
+
+    device = MagicMock(spec=_DeviceType1)
+    context.find_device.return_value = device
+
+    # Should not raise
+    find_device_in_context(context, "", expected_type=_DeviceType1)
+
+    with pytest.raises(ValueError):
+        # Should raise
+        find_device_in_context(context, "", expected_type=_DeviceType2)
+
+
+def test_find_nonexistent_device_in_context_raises_error():
+    context = MagicMock()
+    context.find_device.return_value = None
+
+    with pytest.raises(ValueError):
+        find_device_in_context(context, "", Device)
+
+
+def test_device_composite_from_context():
+    context = MagicMock()
+
+    @dataclasses.dataclass
+    class _Composite:
+        device1: _DeviceType1
+        device2: _DeviceType2
+
+    device1_instance = MagicMock(spec=_DeviceType1)
+    device2_instance = MagicMock(spec=_DeviceType2)
+
+    context.find_device = lambda name: {
+        "device1": device1_instance,
+        "device2": device2_instance,
+    }.get(name)
+
+    composite = device_composite_from_context(context, _Composite)
+
+    assert composite.device1 == device1_instance
+    assert isinstance(composite.device1, _DeviceType1)
+
+    assert composite.device2 == device2_instance
+    assert isinstance(composite.device2, _DeviceType2)
diff --git a/tests/test_cli.py b/tests/unit_tests/test_cli.py
similarity index 100%
rename from tests/test_cli.py
rename to tests/unit_tests/test_cli.py
diff --git a/tests/test_example.py b/tests/unit_tests/test_example.py
similarity index 100%
rename from tests/test_example.py
rename to tests/unit_tests/test_example.py
diff --git a/utility_scripts/beam_off_trickery.sh b/utility_scripts/beam_off_trickery.sh
new file mode 100755
index 000000000..153e33cb7
--- /dev/null
+++ b/utility_scripts/beam_off_trickery.sh
@@ -0,0 +1,11 @@
+FOOLED_VALUE="100000"
+INIT_VALUE=`caget -t BL03I-EA-FDBK-01:THRESHOLDPC_XBPM2`
+
+trap "caput BL03I-EA-FDBK-01:THRESHOLDPC_XBPM2 $INIT_VALUE" EXIT HUP
+
+echo "Correct value found, setting to value to fool BPM"
+caput BL03I-EA-FDBK-01:THRESHOLDPC_XBPM2 $FOOLED_VALUE
+
+echo "Will set back to $INIT_VALUE on termination"
+
+sleep infinity
diff --git a/utility_scripts/build_imginfo.sh b/utility_scripts/build_imginfo.sh
new file mode 100755
index 000000000..05d47627c
--- /dev/null
+++ b/utility_scripts/build_imginfo.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+function help {
+    cat <<END
+    Create and build imginfo in the specified directory
+Options:
+    -p    use podman instead of docker
+    -d    build debug imginfo
+`basename $0` --help|-h
+    This help
+END
+    exit 1
+}
+
+if [ -z "$1" ]; then
+    help
+fi
+
+DOCKER=docker
+while [ -n "$1" ] ; do
+    case $1 in
+        --help|-h)
+            help
+            ;;
+        -d)
+            DOCKERFILE=Dockerfile-imginfo-debug
+            shift
+            ;;
+        -p)
+            DOCKER=podman
+            shift
+            ;;
+        *)
+            break
+            ;;
+    esac
+done
+if [ -z "$1" ]; then
+    echo 
"workspace not specified" + exit 1 +fi + +WORKDIR=$1 +IMGINFO_TAG=9810b92 +IMGINFO_GITHUB=git@github.com:githubgphl/imginfo.git +if [ -d ${WORKDIR} ]; then + rm -r ${WORKDIR} +fi +git clone ${IMGINFO_GITHUB} ${WORKDIR} +git checkout ${IMGINFO_TAG} +if [ -z "$DOCKERFILE" ]; then + DOCKERFILE=${WORKDIR}/Dockerfile +else + DOCKERFILE=${WORKDIR}/${DOCKERFILE} +fi +${DOCKER} build ${WORKDIR} --file ${DOCKERFILE} --tag imginfo diff --git a/utility_scripts/deploy/create_venv.py b/utility_scripts/deploy/create_venv.py new file mode 100644 index 000000000..393826518 --- /dev/null +++ b/utility_scripts/deploy/create_venv.py @@ -0,0 +1,25 @@ +import os +import sys +from subprocess import PIPE, CalledProcessError, Popen + + +def setup_venv(path_to_create_venv_script, deployment_directory): + # Set up environment and run /dls_dev_env.sh... + os.chdir(deployment_directory) + print(f"Setting up environment in {deployment_directory}") + + with Popen( + path_to_create_venv_script, stdout=PIPE, bufsize=1, universal_newlines=True + ) as p: + if p.stdout is not None: + for line in p.stdout: + print(line, end="") + if p.returncode != 0: + raise CalledProcessError(p.returncode, p.args) + + +if __name__ == "__main__": + # This should only be entered from the control machine + path_to_create_venv_script = sys.argv[1] + deployment_directory = sys.argv[2] + setup_venv(path_to_create_venv_script, deployment_directory) diff --git a/deploy/deploy_edm_for_ssx.sh b/utility_scripts/deploy/deploy_edm_for_ssx.sh similarity index 100% rename from deploy/deploy_edm_for_ssx.sh rename to utility_scripts/deploy/deploy_edm_for_ssx.sh diff --git a/utility_scripts/deploy/deploy_hyperion.py b/utility_scripts/deploy/deploy_hyperion.py new file mode 100644 index 000000000..ec96eca87 --- /dev/null +++ b/utility_scripts/deploy/deploy_hyperion.py @@ -0,0 +1,206 @@ +import argparse +import os +import re +import subprocess +from uuid import uuid1 + +from create_venv import setup_venv +from git import Repo +from packaging.version import VERSION_PATTERN, Version + +recognised_beamlines = ["dev", "i03", "i04"] + +VERSION_PATTERN_COMPILED = re.compile( + f"^{VERSION_PATTERN}$", re.VERBOSE | re.IGNORECASE +) + +DEV_DEPLOY_LOCATION = "/scratch/30day_tmp/hyperion_release_test/bluesky" + + +class repo: + # Set name, setup remote origin, get the latest version""" + def __init__(self, name: str, repo_args): + self.name = name + self.repo = Repo(repo_args) + + self.origin = self.repo.remotes.origin + self.origin.fetch() + self.origin.fetch("refs/tags/*:refs/tags/*") + + self.versions = [ + t.name for t in self.repo.tags if VERSION_PATTERN_COMPILED.match(t.name) + ] + self.versions.sort(key=Version, reverse=True) + print(f"Found {self.name}_versions:\n{os.linesep.join(self.versions)}") + self.latest_version_str = self.versions[0] + + def deploy(self, url): + print(f"Cloning latest version {self.name} into {self.deploy_location}") + + deploy_repo = Repo.init(self.deploy_location) + deploy_origin = deploy_repo.create_remote("origin", self.origin.url) + + deploy_origin.fetch() + deploy_origin.fetch("refs/tags/*:refs/tags/*") + deploy_repo.git.checkout(self.latest_version_str) + + print("Setting permissions") + groups_to_give_permission = ["i03_staff", "gda2", "dls_dasc"] + setfacl_params = ",".join( + [f"g:{group}:rwx" for group in groups_to_give_permission] + ) + + # Set permissions and defaults + os.system(f"setfacl -R -m {setfacl_params} {self.deploy_location}") + os.system(f"setfacl -dR -m {setfacl_params} {self.deploy_location}") + + # Deploy 
location depends on the latest hyperion version (...software/bluesky/hyperion_V...) + def set_deploy_location(self, release_area): + self.deploy_location = os.path.join(release_area, self.name) + if os.path.isdir(self.deploy_location): + raise Exception( + f"{self.deploy_location} already exists, stopping deployment for {self.name}" + ) + + +# Get the release directory based off the beamline and the latest hyperion version +def get_hyperion_release_dir_from_args() -> str: + parser = argparse.ArgumentParser() + parser.add_argument( + "beamline", + type=str, + choices=recognised_beamlines, + help="The beamline to deploy hyperion to", + ) + + args = parser.parse_args() + if args.beamline == "dev": + print("Running as dev") + return DEV_DEPLOY_LOCATION + else: + return f"/dls_sw/{args.beamline}/software/bluesky" + + +def create_environment_from_control_machine(): + try: + user = os.environ["USER"] + except KeyError: + user = input( + "Couldn't find username from the environment. Enter FedID in order to SSH to control machine:" + ) + cmd = f"ssh {user}@i03-control python3 {path_to_create_venv} {path_to_dls_dev_env} {hyperion_repo.deploy_location}" + + process = None + try: + # Call python script on i03-control to create the environment + process = subprocess.Popen( + cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + stdout, stderr = process.communicate() + if process.returncode != 0: + print(f"Error occurred: {stderr.decode()}") + else: + print(f"Output: {stdout.decode()}") + except Exception as e: + print(f"Exception while trying to install venv on i03-control: {e}") + finally: + if process: + process.kill() + + +if __name__ == "__main__": + # Gives path to /bluesky + release_area = get_hyperion_release_dir_from_args() + + this_repo_top = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) + + print(f"Repo top is {this_repo_top}") + + hyperion_repo = repo( + name="hyperion", + repo_args=os.path.join(this_repo_top, ".git"), + ) + + if hyperion_repo.name != "hyperion": + raise ValueError("This function should only be used with the hyperion repo") + + release_area_version = os.path.join( + release_area, f"hyperion_{hyperion_repo.latest_version_str}" + ) + + print(f"Putting releases into {release_area_version}") + + dodal_repo = repo( + name="dodal", + repo_args=os.path.join(this_repo_top, "../dodal/.git"), + ) + + dodal_repo.set_deploy_location(release_area_version) + hyperion_repo.set_deploy_location(release_area_version) + + # Deploy hyperion repo + hyperion_repo.deploy(hyperion_repo.origin.url) + + # Get version of dodal that latest hyperion version uses + with open(f"{release_area_version}/hyperion/setup.cfg") as setup_file: + dodal_url = [ + line + for line in setup_file + if "https://github.com/DiamondLightSource/python-dodal" in line + ] + + # Now deploy the correct version of dodal + dodal_repo.deploy(dodal_url) + + if hyperion_repo.name == "hyperion": + path_to_dls_dev_env = os.path.join( + hyperion_repo.deploy_location, "utility_scripts/dls_dev_env.sh" + ) + path_to_create_venv = os.path.join( + hyperion_repo.deploy_location, "utility_scripts/deploy/create_venv.py" + ) + + # SSH into control machine if not in dev mode + if release_area != DEV_DEPLOY_LOCATION: + create_environment_from_control_machine() + else: + setup_venv(path_to_create_venv, hyperion_repo.deploy_location) + + def create_symlink_by_tmp_and_rename(dirname, target, linkname): + tmp_name = str(uuid1()) + target_path = os.path.join(dirname, target) + linkname_path = os.path.join(dirname, 
linkname) + tmp_path = os.path.join(dirname, tmp_name) + os.symlink(target_path, tmp_path) + os.rename(tmp_path, linkname_path) + + move_symlink = input( + """Move symlink (y/n)? WARNING: this will affect the running version! +Only do so if you have informed the beamline scientist and you're sure Hyperion is not running. +""" + ) + # Creates symlinks: software/bluesky/hyperion_latest -> software/bluesky/hyperion_{version}/hyperion + # software/bluesky/hyperion -> software/bluesky/hyperion_latest + if move_symlink == "y": + old_live_location = os.path.relpath( + os.path.realpath(os.path.join(release_area, "hyperion")), release_area + ) + make_live_stable_symlink = input( + f"The last live deployment was {old_live_location}, do you want to set this as the stable version? (y/n)" + ) + if make_live_stable_symlink == "y": + create_symlink_by_tmp_and_rename( + release_area, old_live_location, "hyperion_stable" + ) + + relative_deploy_loc = os.path.join( + os.path.relpath(hyperion_repo.deploy_location, release_area) + ) + create_symlink_by_tmp_and_rename( + release_area, relative_deploy_loc, "hyperion_latest" + ) + create_symlink_by_tmp_and_rename(release_area, "hyperion_latest", "hyperion") + print(f"New version moved to {hyperion_repo.deploy_location}") + print("To start this version run hyperion_restart from the beamline's GDA") + else: + print("Quitting without latest version being updated") diff --git a/deploy/deploy_mxbluesky.py b/utility_scripts/deploy/deploy_mxbluesky.py similarity index 100% rename from deploy/deploy_mxbluesky.py rename to utility_scripts/deploy/deploy_mxbluesky.py diff --git a/utility_scripts/deploy/test_deploy.py b/utility_scripts/deploy/test_deploy.py new file mode 100644 index 000000000..5a91d5a16 --- /dev/null +++ b/utility_scripts/deploy/test_deploy.py @@ -0,0 +1,11 @@ +import subprocess + +cmd = "ssh qqh35939@i03-control" +process = subprocess.Popen( + cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE +) +stdout, stderr = process.communicate() +if process.returncode != 0: + print(f"Error occurred: {stderr.decode()}") +else: + print(f"Output: {stdout.decode()}") diff --git a/utility_scripts/dev_jaeger_container.sh b/utility_scripts/dev_jaeger_container.sh new file mode 100755 index 000000000..5dd04053f --- /dev/null +++ b/utility_scripts/dev_jaeger_container.sh @@ -0,0 +1,14 @@ +podman run -d --name jaeger \ + -e COLLECTOR_ZIPKIN_HOST_PORT=:9411 \ + -e COLLECTOR_OTLP_ENABLED=true \ + -p 6831:6831/udp \ + -p 6832:6832/udp \ + -p 5778:5778 \ + -p 16686:16686 \ + -p 4317:4317 \ + -p 4318:4318 \ + -p 14250:14250 \ + -p 14268:14268 \ + -p 14269:14269 \ + -p 9411:9411 \ + jaegertracing/all-in-one:1.39 diff --git a/dls_dev_setup.sh b/utility_scripts/dls_dev_env.sh similarity index 55% rename from dls_dev_setup.sh rename to utility_scripts/dls_dev_env.sh index dfc8489eb..3a20909ca 100755 --- a/dls_dev_setup.sh +++ b/utility_scripts/dls_dev_env.sh @@ -1,8 +1,14 @@ #!/bin/bash -# controls_dev sets pip up to look at a local pypi server, which is incomplete -module unload controls_dev +# Check we're in the right place +dir_name=${PWD##*/} +if [ "$dir_name" != "mx-bluesky" ]; then + echo "This script should be run from the 'mx-bluesky' directory" + exit 1 +fi +# controls_dev sets pip up to look at a local pypi server, which is incomplete +module unload controls_dev module load python/3.11 @@ -15,18 +21,20 @@ mkdir .venv python -m venv .venv source .venv/bin/activate - pip install --upgrade pip pip install wheel - pip install -e .[dev] +pre-commit install + # 
Ensure we use a local version of dodal if [ ! -d "../dodal" ]; then git clone git@github.com:DiamondLightSource/dodal.git ../dodal fi -pip uninstall -y dodal pip install -e ../dodal[dev] -tox -p +# get dlstbx into our env +ln -s /dls_sw/apps/dials/latest/latest/modules/dlstbx/src/dlstbx/ .venv/lib/python3.11/site-packages/dlstbx + +pytest -m "not s03" diff --git a/utility_scripts/graylog/Dockerfile b/utility_scripts/graylog/Dockerfile new file mode 100644 index 000000000..82cefdc3f --- /dev/null +++ b/utility_scripts/graylog/Dockerfile @@ -0,0 +1,10 @@ +FROM graylog/graylog:4.2 + +ENV GRAYLOG_ROOT_PASSWORD_SHA2 8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918 +ENV GRAYLOG_PASSWORD_SECRET thisisasecretstring + +COPY tcp_input.json /usr/share/graylog/data/contentpacks/tcp_input.json + +ENV GRAYLOG_CONTENT_PACKS_LOADER_ENABLED true +ENV GRAYLOG_CONTENT_PACKS_DIR data/contentpacks +ENV GRAYLOG_CONTENT_PACKS_AUTO_INSTALL tcp_input.json diff --git a/utility_scripts/graylog/tcp_input.json b/utility_scripts/graylog/tcp_input.json new file mode 100644 index 000000000..d5f6a24ac --- /dev/null +++ b/utility_scripts/graylog/tcp_input.json @@ -0,0 +1,103 @@ +{ + "id": "c7c601fc-5090-4a0c-a4f3-e757968eeca2", + "rev": 1, + "v": "1", + "name": "Hyperion TCP Input", + "summary": "Hyperion GELF TCP input.", + "description": "", + "vendor": "DLS", + "url": "", + "created_at": "2022-09-01T13:55:24.405Z", + "server_version": "4.2.13+9c90b93", + "parameters": [], + "entities": [ + { + "id": "82d25c14-574d-4f3e-be12-dd7e76e6ee03", + "type": { + "name": "input", + "version": "1" + }, + "v": "1", + "data": { + "title": { + "@type": "string", + "@value": "Hyperion GELF TCP" + }, + "configuration": { + "tls_key_file": { + "@type": "string", + "@value": "" + }, + "port": { + "@type": "integer", + "@value": 5555 + }, + "tls_enable": { + "@type": "boolean", + "@value": false + }, + "use_null_delimiter": { + "@type": "boolean", + "@value": true + }, + "recv_buffer_size": { + "@type": "integer", + "@value": 1048576 + }, + "tcp_keepalive": { + "@type": "boolean", + "@value": false + }, + "tls_client_auth_cert_file": { + "@type": "string", + "@value": "" + }, + "bind_address": { + "@type": "string", + "@value": "127.0.0.1" + }, + "tls_cert_file": { + "@type": "string", + "@value": "" + }, + "max_message_size": { + "@type": "integer", + "@value": 2097152 + }, + "tls_client_auth": { + "@type": "string", + "@value": "disabled" + }, + "decompress_size_limit": { + "@type": "integer", + "@value": 8388608 + }, + "number_worker_threads": { + "@type": "integer", + "@value": 2 + }, + "tls_key_password": { + "@type": "string", + "@value": "" + } + }, + "static_fields": {}, + "type": { + "@type": "string", + "@value": "org.graylog2.inputs.gelf.tcp.GELFTCPInput" + }, + "global": { + "@type": "boolean", + "@value": true + }, + "extractors": [] + }, + "constraints": [ + { + "type": "server-version", + "version": ">=4.2.13+9c90b93" + } + ] + } + ] +} diff --git a/utility_scripts/run_imginfo.sh b/utility_scripts/run_imginfo.sh new file mode 100755 index 000000000..925182a12 --- /dev/null +++ b/utility_scripts/run_imginfo.sh @@ -0,0 +1,55 @@ +#!/bin/bash +function help { + cat < + Run imginfo against the specified master file +Options: + -s Start an interactive shell instead of executing imginfo + +Environment: + DOCKER set to 'podman' to use podman otherwise docker is the default + +`basename $0` --help|-h + This help +END + exit 1 +} + +if [ -z "$1" ]; then + help +fi + +if [ -z "$DOCKER" ]; then + DOCKER=docker +fi + 
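+# Parse options; the first remaining argument is treated as the master file to inspect.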
+while [ -n "$1" ] ; do + case $1 in + --help|-h) + help + ;; + -s) + SHELL_MODE=1 + shift; + ;; + *) + break + ;; + esac +done +IMAGE_NAME=imginfo:latest + +if ! ${DOCKER} image inspect ${IMAGE_NAME} > /dev/null; then + echo "$DOCKER image ${IMAGE_NAME} does not exist." + exit 2 +fi + +MASTER_FILE=`readlink -f $1` +PARENT_DIR=`dirname $MASTER_FILE` +MASTER_FILENAME=`basename $MASTER_FILE` +if [ -n "$SHELL_MODE" ]; then + ${DOCKER} run -t -i -v ${PARENT_DIR}:/data/:z ${IMAGE_NAME} /bin/bash +else + ${DOCKER} run -v ${PARENT_DIR}:/data/ ${IMAGE_NAME} ./imginfo /data/${MASTER_FILENAME} +fi diff --git a/utility_scripts/setup_graylog.sh b/utility_scripts/setup_graylog.sh new file mode 100755 index 000000000..10ec294be --- /dev/null +++ b/utility_scripts/setup_graylog.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +podman build ./graylog --format docker -t graylog:test + +podman pod create -n hyperion-graylog-pod + +podman run -d --net host --pod=hyperion-graylog-pod --name=hyperion-mongo mongo:4.2 +podman run -d --net host --pod=hyperion-graylog-pod -e "http.host=0.0.0.0" -e "discovery.type=single-node" -e "ES_JAVA_OPTS=-Xms512m -Xmx512m" --name=hyperion-elasticsearch docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.0 +podman run -d --net host --pod=hyperion-graylog-pod -e GRAYLOG_HTTP_EXTERNAL_URI="http://localhost:9000/" -e GRAYLOG_MONGODB_URI="mongodb://localhost:27017/graylog" -e GRAYLOG_ELASTICSEARCH_HOSTS="http://localhost:9200/" --name=hyperion-graylog localhost/graylog:test diff --git a/utility_scripts/strip_metafile.py b/utility_scripts/strip_metafile.py new file mode 100755 index 000000000..8663c52ee --- /dev/null +++ b/utility_scripts/strip_metafile.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python3 +# This script requires python >= 3.10 but we can't warn about this because +# it will fail at compile time before we can execute a version check +import subprocess +import sys +from pathlib import Path +from shutil import copyfile +from tempfile import TemporaryDirectory + +import h5py + + +def main() -> int: + filenames = [] + for option in sys.argv[1:]: + match option: + case "--help" | "-h": + command = sys.argv[0] + print( + f"{command} [metafile]" + f"\n\tStrip the /flatfield and /mask from an HDF5 meta file to make it much smaller" + f"\n\tTHIS WILL MODIFY THE FILE" + f"\n{command} -h | --help" + f"\n\tThis help" + ) + return 0 + case arg: + filenames.append(arg) + + if len(filenames) < 1: + sys.stderr.write("Input and/or output file name not supplied\n") + return 1 + + inputfile = filenames[0] + inputpath = Path(inputfile) + + with TemporaryDirectory() as tempdir: + tmpfile = f"{tempdir}/{inputpath.name}" + copyfile(inputfile, tmpfile) + with h5py.File(tmpfile, "r+") as metafile: + del metafile["flatfield"] + metafile.create_dataset("flatfield", (4362, 4148), "