From bf486f57ad246f0d1004e59dfdbe2b8dc497aec1 Mon Sep 17 00:00:00 2001 From: Jan Lienemann Date: Thu, 20 Jul 2023 11:44:58 +0000 Subject: [PATCH] Release 2.11.0 --- .../00_reference/00_session_reference.ipynb | 52 +- .../01_calibration_reference.ipynb | 92 +- .../02_experiment_definition_reference.ipynb | 95 +- .../00_reference/03_section_tutorial.ipynb | 100 +- .../05_pulse_inspector_plotter.ipynb | 41 +- .../07_waveform_replacement.ipynb | 22 +- examples/00_reference/08_node_sweeping.ipynb | 16 +- .../00_reference/10_database_interface.ipynb | 4 +- .../04_propagation_delay.ipynb | 41 +- ...active_qubit_reset_shfsg_shfqa_shfqc.ipynb | 50 +- .../01_randomized_benchmarking.ipynb | 32 +- .../00_qubit_tuneup_shfsg_shfqa_shfqc.ipynb | 124 +- .../01_single_qubit_tuneup_uhfqa_hdawg.ipynb | 134 +- ...02_two_qubit_experiments_uhfqa_hdawg.ipynb | 48 +- .../03_qubit_tuneup_shfqc_ext_dc_source.ipynb | 2 +- ...rallel_qubit_tuneup_shfqc_hdawg_pqsc.ipynb | 1463 +++++++++++++++++ .../00_user_function_sweeps.ipynb | 18 +- .../00_shfsg_basic_experiments.ipynb | 153 +- examples/06_qasm/01_VQE_Qiskit.ipynb | 115 +- examples/06_qasm/02_Two_Qubit_RB_Qiskit.ipynb | 649 ++++++++ .../03_Two_Qubit_RB_pyGSTi_OpenQASM.ipynb | 701 ++++++++ laboneq/VERSION.txt | 2 +- .../code_generator/analyze_playback.py | 6 +- .../code_generator/sampled_event_handler.py | 312 ++-- .../code_generator/seq_c_generator.py | 37 +- .../compiler/code_generator/seqc_tracker.py | 2 +- .../compiler/experiment_access/device_info.py | 18 - .../compiler/experiment_access/dsl_loader.py | 158 +- .../experiment_access/experiment_dao.py | 39 +- .../compiler/experiment_access/json_dumper.py | 24 +- .../compiler/experiment_access/json_loader.py | 73 +- .../compiler/experiment_access/loader_base.py | 60 +- .../experiment_access/oscillator_info.py | 15 - .../compiler/experiment_access/pulse_def.py | 9 - .../experiment_access/section_info.py | 1 + .../compiler/experiment_access/signal_info.py | 1 - 
laboneq/compiler/qccs-schema_2_5_0.json | 14 +- .../scheduler/loop_iteration_schedule.py | 7 +- laboneq/compiler/scheduler/loop_schedule.py | 5 +- laboneq/compiler/scheduler/match_schedule.py | 40 +- laboneq/compiler/scheduler/parameter_store.py | 10 +- laboneq/compiler/scheduler/pulse_phase.py | 4 +- laboneq/compiler/scheduler/scheduler.py | 125 +- laboneq/compiler/workflow/compiler.py | 53 +- laboneq/compiler/workflow/recipe_generator.py | 31 +- .../example_notebook_helper.py | 87 +- .../example_helpers/feedback_helper.py | 117 +- .../example_helpers/generate_descriptor.py | 1079 ++++++++++++ .../setup_installation_helpers/README.md | 111 ++ .../setup_installation_helpers/__init__.py | 2 + .../cable_checker.py | 446 +++++ laboneq/controller/communication.py | 2 +- laboneq/controller/controller.py | 55 +- laboneq/controller/devices/device_hdawg.py | 11 +- laboneq/controller/devices/device_pqsc.py | 8 +- laboneq/controller/devices/device_shfppc.py | 6 +- laboneq/controller/devices/device_shfqa.py | 42 +- laboneq/controller/devices/device_shfsg.py | 17 +- laboneq/controller/devices/device_uhfqa.py | 25 +- laboneq/controller/devices/device_zi.py | 26 +- laboneq/controller/devices/zi_emulator.py | 1 + laboneq/controller/near_time_runner.py | 6 +- laboneq/controller/protected_session.py | 22 + laboneq/controller/recipe_enums.py | 39 - laboneq/controller/recipe_processor.py | 53 +- laboneq/controller/results.py | 17 +- laboneq/core/types/enums/acquisition_type.py | 5 + laboneq/data/__init__.py | 7 + laboneq/data/calibration/__init__.py | 68 +- laboneq/data/compilation_job/__init__.py | 193 ++- laboneq/data/execution_payload/__init__.py | 120 +- .../execution_payload_helper.py | 23 - .../data/experiment_description/__init__.py | 123 +- .../experiment_helper.py | 2 +- laboneq/data/experiment_results/__init__.py | 62 +- laboneq/data/parameter/__init__.py | 28 + laboneq/data/recipe.py | 142 ++ laboneq/data/scheduled_experiment.py | 9 +- 
laboneq/data/setup_description/__init__.py | 109 +- .../data/setup_description/setup_helper.py | 41 +- laboneq/dsl/device/instruments/nonqc.py | 3 +- laboneq/dsl/experiment/builtins.py | 263 +++ laboneq/dsl/experiment/context.py | 49 + laboneq/dsl/experiment/experiment.py | 88 +- laboneq/dsl/experiment/experiment_context.py | 61 + laboneq/dsl/experiment/pulse_library.py | 2 +- laboneq/dsl/experiment/section.py | 15 +- laboneq/dsl/experiment/section_context.py | 238 +++ laboneq/dsl/laboneq_facade.py | 4 +- laboneq/dsl/parameter.py | 91 +- laboneq/dsl/quantum/__init__.py | 6 +- .../quantum/{qubits.py => quantum_element.py} | 239 +-- ...tum_operations.py => quantum_operation.py} | 2 +- laboneq/dsl/quantum/qubit.py | 168 ++ laboneq/dsl/quantum/transmon.py | 195 +++ laboneq/dsl/result/acquired_result.py | 54 +- laboneq/dsl/serialization/serializer.py | 8 +- laboneq/dsl/session.py | 4 +- .../compilation_service.py | 45 - .../compilation_service_legacy.py | 8 +- .../implementation/data_storage/__init__.py | 2 + ...atabase_wrapper.py => laboneq_database.py} | 9 +- .../services}/__init__.py | 0 .../services/sqlite_dict.py} | 2 +- .../device_setup_generator.py | 205 ++- .../experiment_workflow.py | 24 +- .../legacy_adapters/__init.py__ | 0 .../__init__.py => converters_calibration.py} | 5 +- .../post_process_calibration.py | 6 - ...y => converters_experiment_description.py} | 23 +- .../converters_experiment_results/__init__.py | 57 - .../post_process_experiment_results.py | 6 - ...t__.py => converters_setup_description.py} | 26 +- .../converters_target_setup.py} | 2 +- .../legacy_adapters/device_setup_converter.py | 43 + .../__init__.py => legacy_dsl_adapters.py} | 14 +- .../post_process_experiment_description.py | 0 .../post_process_setup_description.py | 23 +- .../implementation/legacy_adapters/simple2.py | 33 - .../convert_from_legacy_json_recipe.py} | 226 +-- .../payload_builder/payload_builder.py | 429 +---- .../implementation/runner/runner_legacy.py | 191 +-- 
.../laboneq_settings.py | 2 +- .../data_storage/data_storage_api.py | 2 +- laboneq/interfaces/runner/runner_api.py | 11 +- .../interfaces/runner/runner_control_api.py | 14 +- laboneq/openqasm3/gate_store.py | 6 +- laboneq/openqasm3/openqasm3_importer.py | 6 +- laboneq/simple.py | 10 +- requirements-dev.txt | 7 + 130 files changed, 7972 insertions(+), 3057 deletions(-) create mode 100644 examples/03_superconducting_qubits/04_parallel_qubit_tuneup_shfqc_hdawg_pqsc.ipynb create mode 100644 examples/06_qasm/02_Two_Qubit_RB_Qiskit.ipynb create mode 100644 examples/06_qasm/03_Two_Qubit_RB_pyGSTi_OpenQASM.ipynb delete mode 100644 laboneq/compiler/experiment_access/device_info.py delete mode 100644 laboneq/compiler/experiment_access/oscillator_info.py create mode 100644 laboneq/contrib/example_helpers/generate_descriptor.py create mode 100644 laboneq/contrib/setup_installation_helpers/README.md create mode 100644 laboneq/contrib/setup_installation_helpers/__init__.py create mode 100644 laboneq/contrib/setup_installation_helpers/cable_checker.py delete mode 100644 laboneq/controller/recipe_enums.py delete mode 100644 laboneq/data/execution_payload/execution_payload_helper.py create mode 100644 laboneq/data/parameter/__init__.py create mode 100644 laboneq/data/recipe.py create mode 100644 laboneq/dsl/experiment/builtins.py create mode 100644 laboneq/dsl/experiment/context.py create mode 100644 laboneq/dsl/experiment/experiment_context.py create mode 100644 laboneq/dsl/experiment/section_context.py rename laboneq/dsl/quantum/{qubits.py => quantum_element.py} (52%) rename laboneq/dsl/quantum/{quantum_operations.py => quantum_operation.py} (98%) create mode 100644 laboneq/dsl/quantum/qubit.py create mode 100644 laboneq/dsl/quantum/transmon.py delete mode 100644 laboneq/implementation/compilation_service/compilation_service.py rename laboneq/implementation/data_storage/{l1q_database_wrapper.py => laboneq_database.py} (58%) rename laboneq/implementation/{data_storage_service => 
data_storage/services}/__init__.py (100%) rename laboneq/implementation/{data_storage_service/data_storage_service_sqlite_dict.py => data_storage/services/sqlite_dict.py} (99%) delete mode 100644 laboneq/implementation/legacy_adapters/__init.py__ rename laboneq/implementation/legacy_adapters/{converters_calibration/__init__.py => converters_calibration.py} (99%) delete mode 100644 laboneq/implementation/legacy_adapters/converters_calibration/post_process_calibration.py rename laboneq/implementation/legacy_adapters/{converters_experiment_description/__init__.py => converters_experiment_description.py} (97%) delete mode 100644 laboneq/implementation/legacy_adapters/converters_experiment_results/__init__.py delete mode 100644 laboneq/implementation/legacy_adapters/converters_experiment_results/post_process_experiment_results.py rename laboneq/implementation/legacy_adapters/{converters_setup_description/__init__.py => converters_setup_description.py} (93%) rename laboneq/{dsl/new_arch_support.py => implementation/legacy_adapters/converters_target_setup.py} (99%) create mode 100644 laboneq/implementation/legacy_adapters/device_setup_converter.py rename laboneq/implementation/legacy_adapters/{legacy_dsl_adapters/__init__.py => legacy_dsl_adapters.py} (96%) rename laboneq/implementation/legacy_adapters/{converters_experiment_description => }/post_process_experiment_description.py (100%) rename laboneq/implementation/legacy_adapters/{converters_setup_description => }/post_process_setup_description.py (88%) delete mode 100644 laboneq/implementation/legacy_adapters/simple2.py rename laboneq/{controller/recipe_1_4_0.py => implementation/payload_builder/convert_from_legacy_json_recipe.py} (59%) diff --git a/examples/00_reference/00_session_reference.ipynb b/examples/00_reference/00_session_reference.ipynb index 4fdb5f8..f58194e 100644 --- a/examples/00_reference/00_session_reference.ipynb +++ b/examples/00_reference/00_session_reference.ipynb @@ -6,7 +6,7 @@ "id": "771e7eff", 
"metadata": {}, "source": [ - "# Prerequisites" + "# Session Reference" ] }, { @@ -25,7 +25,7 @@ "from laboneq.simple import *\n", "\n", "# pretty printing\n", - "from pprint import pprint\n" + "from pprint import pprint" ] }, { @@ -73,7 +73,7 @@ " device_pqsc:\n", " - to: device_hdawg\n", " port: ZSYNCS/0\n", - "\"\"\"\n" + "\"\"\"" ] }, { @@ -155,7 +155,7 @@ " \"acquire_line\"\n", " ].oscillator = Oscillator(\n", " uid=\"acquire_q1_osc\", frequency=1e8, modulation_type=ModulationType.SOFTWARE\n", - " )\n" + " )" ] }, { @@ -176,7 +176,7 @@ " server_port=\"8004\",\n", " setup_name=\"ZI_QCCS\",\n", ")\n", - "calibrate_devices(device_setup)\n" + "calibrate_devices(device_setup)" ] }, { @@ -245,7 +245,7 @@ " exp.play(\n", " signal=\"q0_measure\",\n", " pulse=pulse_library.const(length=1e-6, amplitude=0.0),\n", - " ) # KNOWN ISSUE: Need to wait for 1us to prevent QA holdoff error\n" + " ) # KNOWN ISSUE: Need to wait for 1us to prevent QA holdoff error" ] }, { @@ -270,7 +270,7 @@ "outputs": [], "source": [ "session = Session(device_setup=device_setup)\n", - "session.connect(do_emulation=True)\n" + "session.connect(do_emulation=True)" ] }, { @@ -294,7 +294,7 @@ }, "outputs": [], "source": [ - "compiled_experiment = session.compile(experiment=exp)\n" + "compiled_experiment = session.compile(experiment=exp)" ] }, { @@ -318,7 +318,7 @@ }, "outputs": [], "source": [ - "compiled_experiment = session.compiled_experiment\n" + "compiled_experiment = session.compiled_experiment" ] }, { @@ -344,7 +344,7 @@ }, "outputs": [], "source": [ - "my_results = session.run(compiled_experiment)\n" + "my_results = session.run(compiled_experiment)" ] }, { @@ -387,7 +387,7 @@ }, "outputs": [], "source": [ - "results = session.results\n" + "results = session.results" ] }, { @@ -412,7 +412,7 @@ }, "outputs": [], "source": [ - "results_copy = session.get_results()\n" + "results_copy = session.get_results()" ] }, { @@ -442,7 +442,7 @@ "experiment_calibration = results.experiment_calibration\n", 
"signal_map = results.signal_map\n", "compiled_experiment = results.compiled_experiment\n", - "acquired_results = results.acquired_results\n" + "acquired_results = results.acquired_results" ] }, { @@ -468,7 +468,7 @@ "source": [ "recipe = compiled_experiment.recipe\n", "src = compiled_experiment.src\n", - "waves = compiled_experiment.waves\n" + "waves = compiled_experiment.waves" ] }, { @@ -493,7 +493,7 @@ }, "outputs": [], "source": [ - "pprint(acquired_results)\n" + "pprint(acquired_results)" ] }, { @@ -529,7 +529,7 @@ "acquired_data = results.get_data(\"h\")\n", "# 1D -> one axis only\n", "axis_grid = results.get_axis(\"h\")[0]\n", - "axis_name = results.get_axis_name(\"h\")[0]\n" + "axis_name = results.get_axis_name(\"h\")[0]" ] }, { @@ -557,7 +557,7 @@ "metadata": {}, "outputs": [], "source": [ - "session.save(\"my_session.json\")\n" + "session.save(\"my_session.json\")" ] }, { @@ -582,7 +582,7 @@ "session.save_device_calibration(\"my_device_calibration.json\")\n", "session.save_experiment_calibration(\"my_experiment_calibration.json\")\n", "session.save_signal_map(\"my_signal_map.json\")\n", - "session.save_results(\"my_results.json\")\n" + "session.save_results(\"my_results.json\")" ] }, { @@ -601,7 +601,7 @@ "metadata": {}, "outputs": [], "source": [ - "new_session = Session.load(\"my_session.json\")\n" + "new_session = Session.load(\"my_session.json\")" ] }, { @@ -621,7 +621,7 @@ "outputs": [], "source": [ "new_session.connect(do_emulation=True)\n", - "new_results = new_session.run()\n" + "new_results = new_session.run()" ] }, { @@ -645,7 +645,7 @@ "session.load_compiled_experiment(\"my_compiled_experiment.json\")\n", "session.load_device_calibration(\"my_device_calibration.json\")\n", "session.load_experiment_calibration(\"my_experiment_calibration.json\")\n", - "session.load_signal_map(\"my_signal_map.json\")\n" + "session.load_signal_map(\"my_signal_map.json\")" ] }, { @@ -674,7 +674,7 @@ "# Signal Maps can only be loaded on and applied to an existing 
Experiment:\n", "my_experiment.load_signal_map(\"my_signal_map.json\")\n", "\n", - "print(my_device_setup)\n" + "print(my_device_setup)" ] }, { @@ -711,7 +711,7 @@ "import numpy as np\n", "\n", "plt.plot(axis_grid, np.absolute(acquired_data))\n", - "plt.xlabel(axis_name)\n" + "plt.xlabel(axis_name)" ] }, { @@ -732,7 +732,7 @@ "source": [ "from laboneq.simulator.output_simulator import OutputSimulator\n", "\n", - "simulator = OutputSimulator(compiled_experiment)\n" + "simulator = OutputSimulator(compiled_experiment)" ] }, { @@ -748,7 +748,7 @@ "acquire_port = qb[\"acquire_line\"].physical_channel\n", "\n", "snippet_measure = simulator.get_snippet(measure_iq_port, start=0, output_length=1e-5)\n", - "snippet_acquire = simulator.get_snippet(acquire_port, start=0, output_length=1e-5)\n" + "snippet_acquire = simulator.get_snippet(acquire_port, start=0, output_length=1e-5)" ] }, { @@ -772,7 +772,7 @@ "\n", "time = snippet_acquire.time\n", "ax[2].plot(time, snippet_acquire.wave.real)\n", - "ax[2].set_title(\"QA trigger\")\n" + "ax[2].set_title(\"QA trigger\")" ] } ], diff --git a/examples/00_reference/01_calibration_reference.ipynb b/examples/00_reference/01_calibration_reference.ipynb index 5dcc21b..5e0c098 100644 --- a/examples/00_reference/01_calibration_reference.ipynb +++ b/examples/00_reference/01_calibration_reference.ipynb @@ -5,7 +5,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# General Notes on Calibration" + "# Calibration Reference\n", + "\n", + "In this notebook, you'll learn how to calibrate a `DeviceSetup` and `Experiment` object in LabOne Q in multiple ways, as well as get, set, reset, and serialize your calibration." ] }, { @@ -48,7 +50,7 @@ "## Overriding Mechanism\n", "The calibration of a `DeviceSetup` is considered as the baseline calibration that is used for experiment execution.\n", "\n", - "This baseline calibration can be overriden non-destructively with calibration on an `Experiment`. If a `SignalCalibration` is defined i.e. 
not `None` on an `ExperimentSignal`, then actual values from that `SignalCalibration` are considered while the corresponding values in the `SignalCalibration` on the `LogicalSignal` are ignored and left unmodified. If there are values in the `SignalCalibration` on the `ExperimentSignal` that are set to `None`, these values are not considered and the corresponding values in the baseline `SignalCalibration` on the corresponding `LogicalSignal` remain effective. \n", + "This baseline calibration can be overridden non-destructively with calibration on an `Experiment`. If a `SignalCalibration` is defined i.e. not `None` on an `ExperimentSignal`, then actual values from that `SignalCalibration` are considered while the corresponding values in the `SignalCalibration` on the `LogicalSignal` are ignored and left unmodified. If there are values in the `SignalCalibration` on the `ExperimentSignal` that are set to `None`, these values are not considered and the corresponding values in the baseline `SignalCalibration` on the corresponding `LogicalSignal` remain effective. \n", "\n", "For example, the oscillator defined for a `LogicalSignal` can be overriden by an oscillator in the corresponding `ExperimentSignals`'s `SignalCalibration`. If this `SignalCalibration` only defines an oscillator but leaves all other values to `None`, only this oscillator will override the baseline oscillator. All values set to `None` e.g. like the mixer calibration values will leave the baseline values effective." ] @@ -67,6 +69,16 @@ "![Calibration%20Override.svg](attachment:Calibration%20Override.svg)" ] }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Device Calibration\n", + "\n", + "You'll start by importing LabOne Q, and defining a descriptor, here with an HDAWG, UHFQA, and PQSC." 
+ ] + }, { "cell_type": "code", "execution_count": null, @@ -77,7 +89,7 @@ "from laboneq.simple import *\n", "\n", "# pretty printing\n", - "from pprint import pprint\n" + "from pprint import pprint" ] }, { @@ -110,7 +122,7 @@ " device_pqsc:\n", " - to: device_hdawg\n", " port: ZSYNCS/0\n", - "\"\"\"\n" + "\"\"\"" ] }, { @@ -118,15 +130,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Device Calibration" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Define Uncalibrated `DeviceSetup`" + "## Define Uncalibrated `DeviceSetup`\n", + "\n", + "Using the descriptor, the `DeviceSetup` can be created. Initially, it is uncalibrated." ] }, { @@ -140,7 +146,7 @@ " server_host=\"111.22.33.44\",\n", " server_port=\"8004\",\n", " setup_name=\"ZI_QCCS\",\n", - ")\n" + ")" ] }, { @@ -159,7 +165,7 @@ "metadata": {}, "outputs": [], "source": [ - "device_setup.list_calibratables()\n" + "device_setup.list_calibratables()" ] }, { @@ -167,7 +173,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Calibrate `DeviceSetup`" + "## Calibrate `DeviceSetup`\n", + "\n", + "Below, you'll use different methods to use calibrations, either by setting the calibration directly to the `DeviceSetup` or by ceating a `Calibration` object." 
] }, { @@ -221,7 +229,7 @@ " ),\n", " port_delay=0,\n", " delay_signal=0,\n", - ")\n" + ")" ] }, { @@ -266,7 +274,7 @@ "\n", "q0_signals[\"acquire_line\"].oscillator = Oscillator(\n", " uid=\"acquire_osc\", frequency=1e8, modulation_type=ModulationType.HARDWARE\n", - ")\n" + ")" ] }, { @@ -315,7 +323,7 @@ " Oscillator(\n", " uid=\"acquire_osc\", frequency=1e8, modulation_type=ModulationType.HARDWARE\n", " )\n", - ")\n" + ")" ] }, { @@ -332,7 +340,7 @@ "metadata": {}, "outputs": [], "source": [ - "device_setup.set_calibration(device_calib)\n" + "device_setup.set_calibration(device_calib)" ] }, { @@ -364,7 +372,7 @@ "device_setup.set_calibration(device_calib)\n", "print(device_setup.get_calibration())\n", "\n", - "assert device_calib == device_setup.get_calibration()\n" + "assert device_calib == device_setup.get_calibration()" ] }, { @@ -407,7 +415,7 @@ " \"q0_measure\",\n", " \"q0_acquire\",\n", " ],\n", - ")\n" + ")" ] }, { @@ -426,7 +434,7 @@ "metadata": {}, "outputs": [], "source": [ - "exp.list_calibratables()\n" + "exp.list_calibratables()" ] }, { @@ -444,7 +452,7 @@ "source": [ "### Variant 1: Direct Calibration at `Experiment` Definition Time\n", "\n", - "The constructor of the `ExperimentSignal` accepts calibration values of an `ExperimentSignalCalibration` and forwards them internally when assiging a `ExperimentSignalCalibration` object to the `calibration` property:" + "The constructor of the `ExperimentSignal` accepts calibration values of an `ExperimentSignalCalibration` and forwards them internally when assigning a `ExperimentSignalCalibration` object to the `calibration` property:" ] }, { @@ -480,7 +488,7 @@ " ),\n", " ExperimentSignal(uid=\"q0_acquire\", oscillator=Oscillator(frequency=3.0e6)),\n", " ],\n", - ")\n" + ")" ] }, { @@ -530,7 +538,7 @@ " uid=\"q0_acquire\", calibration=SignalCalibration(Oscillator(frequency=3.0e6))\n", " ),\n", " ],\n", - ")\n" + ")" ] }, { @@ -571,7 +579,7 @@ " ),\n", ")\n", "\n", - 
"exp.signals[\"q0_acquire\"].calibration = SignalCalibration(Oscillator(frequency=3.0e6))\n" + "exp.signals[\"q0_acquire\"].calibration = SignalCalibration(Oscillator(frequency=3.0e6))" ] }, { @@ -611,7 +619,7 @@ ")\n", "\n", "es = exp.signals[\"q0_acquire\"]\n", - "es.oscillator = Oscillator(frequency=3.0e6)\n" + "es.oscillator = Oscillator(frequency=3.0e6)" ] }, { @@ -643,7 +651,7 @@ " ),\n", ")\n", "exp_calib[\"q0_measure\"] = SignalCalibration(Oscillator(frequency=2.0e6))\n", - "exp_calib[\"q0_acquire\"] = SignalCalibration(Oscillator(frequency=3.0e6))\n" + "exp_calib[\"q0_acquire\"] = SignalCalibration(Oscillator(frequency=3.0e6))" ] }, { @@ -660,7 +668,7 @@ "metadata": {}, "outputs": [], "source": [ - "exp.set_calibration(exp_calib)\n" + "exp.set_calibration(exp_calib)" ] }, { @@ -690,7 +698,7 @@ "\n", "print(\"\\nSet experiment calibration:\")\n", "exp.set_calibration(exp_calib)\n", - "print(exp.get_calibration())\n" + "print(exp.get_calibration())" ] }, { @@ -719,7 +727,7 @@ "assert exp_calib != exp_calib_copy\n", "assert exp_calib_copy != exp.get_calibration()\n", "exp.set_calibration(exp_calib)\n", - "assert exp_calib == exp.get_calibration()\n" + "assert exp_calib == exp.get_calibration()" ] }, { @@ -737,7 +745,7 @@ "outputs": [], "source": [ "exp.signals[\"q0_drive\"].oscillator.frequency = 1.0\n", - "exp.signals[\"q0_drive\"].voltage_offsets = [2]\n" + "exp.signals[\"q0_drive\"].voltage_offsets = [2]" ] }, { @@ -781,7 +789,7 @@ " ],\n", " ),\n", " ],\n", - ")\n" + ")" ] }, { @@ -803,7 +811,7 @@ "q0 = device_setup.logical_signal_groups[\"q0\"]\n", "exp.map_signal(\"q0_drive\", q0.logical_signals[\"drive_line\"])\n", "exp.map_signal(\"q0_measure\", q0.logical_signals[\"measure_line\"])\n", - "exp.map_signal(\"q0_acquire\", q0.logical_signals[\"acquire_line\"])\n" + "exp.map_signal(\"q0_acquire\", q0.logical_signals[\"acquire_line\"])" ] }, { @@ -822,7 +830,7 @@ "source": [ "exp.map_signal(\"q0_drive\", \"/logical_signal_groups/q0/drive_line\")\n", 
"exp.map_signal(\"q0_measure\", \"/logical_signal_groups/q0/measure_line\")\n", - "exp.map_signal(\"q0_acquire\", \"/logical_signal_groups/q0/acquire_line\")\n" + "exp.map_signal(\"q0_acquire\", \"/logical_signal_groups/q0/acquire_line\")" ] }, { @@ -847,7 +855,7 @@ " \"q0_measure\": \"/logical_signal_groups/q0/measure_line\",\n", " \"q0_acquire\": \"/logical_signal_groups/q0/acquire_line\",\n", " }\n", - ")\n" + ")" ] }, { @@ -875,7 +883,7 @@ " \"q0_measure\": q0.logical_signals[\"measure_line\"],\n", " \"q0_acquire\": q0.logical_signals[\"acquire_line\"],\n", " }\n", - ")\n" + ")" ] }, { @@ -903,7 +911,7 @@ " \"q0_measure\": q0.logical_signals[\"measure_line\"].path,\n", " \"q0_acquire\": q0.logical_signals[\"acquire_line\"].path,\n", " }\n", - ")\n" + ")" ] }, { @@ -962,7 +970,7 @@ " }\n", ")\n", "pprint(exp.get_signal_map())\n", - "pprint(exp.signal_mapping_status)\n" + "pprint(exp.signal_mapping_status)" ] }, { @@ -982,7 +990,7 @@ "device_calib = device_setup.get_calibration()\n", "device_calib.save(\"my_device_calib.json\")\n", "device_calib_loaded = Calibration.load(\"my_device_calib.json\")\n", - "assert device_calib == device_calib_loaded\n" + "assert device_calib == device_calib_loaded" ] }, { @@ -994,7 +1002,7 @@ "exp_calib = exp.get_calibration()\n", "exp_calib.save(\"my_exp_calib.json\")\n", "exp_calib_loaded = Calibration.load(\"my_exp_calib.json\")\n", - "assert exp_calib == exp_calib_loaded\n" + "assert exp_calib == exp_calib_loaded" ] } ], diff --git a/examples/00_reference/02_experiment_definition_reference.ipynb b/examples/00_reference/02_experiment_definition_reference.ipynb index 9fac3f1..df5cb5c 100644 --- a/examples/00_reference/02_experiment_definition_reference.ipynb +++ b/examples/00_reference/02_experiment_definition_reference.ipynb @@ -6,7 +6,19 @@ "id": "771e7eff", "metadata": {}, "source": [ - "# Prerequisites" + "# Experiment Reference\n", + "\n", + "In this notebook, you'll see how you can build up an experiment piece by piece." 
+ ] + }, + { + "cell_type": "markdown", + "id": "5c0205b5", + "metadata": {}, + "source": [ + "# Imports, device setup, and example experiment\n", + "\n", + "You'll start by importing LabOne Q, defining your descriptor, providing a calibration, and creating a device setup." ] }, { @@ -22,7 +34,7 @@ "outputs": [], "source": [ "# convenience import for all LabOne Q software functionality\n", - "from laboneq.simple import *\n" + "from laboneq.simple import *" ] }, { @@ -70,7 +82,7 @@ " device_pqsc:\n", " - to: device_hdawg\n", " port: ZSYNCS/0\n", - "\"\"\"\n" + "\"\"\"" ] }, { @@ -162,7 +174,7 @@ " \"acquire_line\"\n", " ].oscillator = Oscillator(\n", " uid=\"acquire_q1_osc\", frequency=1e8, modulation_type=ModulationType.SOFTWARE\n", - " )\n" + " )" ] }, { @@ -183,7 +195,17 @@ " server_port=\"8004\",\n", " setup_name=\"ZI_QCCS\",\n", ")\n", - "calibrate_devices(device_setup)\n" + "calibrate_devices(device_setup)" + ] + }, + { + "cell_type": "markdown", + "id": "9a712bcc", + "metadata": {}, + "source": [ + "## Example experiment\n", + "\n", + "After creating your device setup, you can run an example experiment, e.g., this resonator spectroscopy." ] }, { @@ -252,7 +274,7 @@ " exp.play(\n", " signal=\"q0_measure\",\n", " pulse=pulse_library.const(length=1e-6, amplitude=0.0),\n", - " ) # KNOWN ISSUE: Need to wait for 1us to prevent QA holdoff error\n" + " ) # KNOWN ISSUE: Need to wait for 1us to prevent QA holdoff error" ] }, { @@ -269,7 +291,7 @@ "source": [ "session = Session(device_setup=device_setup)\n", "session.connect(do_emulation=True)\n", - "my_results = session.run(exp)\n" + "my_results = session.run(exp)" ] }, { @@ -278,7 +300,9 @@ "id": "c6460f9b", "metadata": {}, "source": [ - "# Experiment Instantiation" + "# Experiment Instantiation\n", + "\n", + "You'll now create an experiment from scratch, going through each step." 
] }, { @@ -300,7 +324,7 @@ "exp = Experiment(\n", " uid=\"MyExperiment\",\n", " signals=[ExperimentSignal(uid=\"q0_measure\"), ExperimentSignal(uid=\"q0_acquire\")],\n", - ")\n" + ")" ] }, { @@ -315,7 +339,7 @@ }, "outputs": [], "source": [ - "exp.add_signal(uid=\"q0_drive\")\n" + "exp.add_signal(uid=\"q0_drive\")" ] }, { @@ -330,7 +354,7 @@ }, "outputs": [], "source": [ - "exp.experiment_signals_uids()\n" + "exp.experiment_signals_uids()" ] }, { @@ -345,7 +369,7 @@ }, "outputs": [], "source": [ - "exp.is_experiment_signal(\"q0_drive\")\n" + "exp.is_experiment_signal(\"q0_drive\")" ] }, { @@ -370,7 +394,7 @@ "exp.map_signal(\n", " \"q0_acquire\",\n", " device_setup.logical_signal_groups[\"q0\"].logical_signals[\"acquire_line\"],\n", - ")\n" + ")" ] }, { @@ -386,7 +410,7 @@ }, "outputs": [], "source": [ - "exp.signal_mapping_status\n" + "exp.signal_mapping_status" ] }, { @@ -395,7 +419,7 @@ "id": "91e39f06", "metadata": {}, "source": [ - "# DSL Sections" + "# Sections" ] }, { @@ -404,7 +428,7 @@ "id": "7134963f", "metadata": {}, "source": [ - "## Simple Section" + "## Simple Section" ] }, { @@ -424,7 +448,7 @@ " pass\n", "# with exp.section(uid=\"qubit_excitation\"): # Nest one section in another and inherit exec_type from parent section\n", "# e.g. 
play pulses here\n", - "# pass\n" + "# pass" ] }, { @@ -433,7 +457,7 @@ "id": "7a33f47f", "metadata": {}, "source": [ - "## Access Current Section" + "## Access Current Section" ] }, { @@ -454,7 +478,7 @@ " play_x90_on_drive(s)\n", "\n", "with exp.section(uid=\"conclude\") as s:\n", - " play_x90_on_drive(s)\n" + " play_x90_on_drive(s)" ] }, { @@ -484,7 +508,7 @@ ")\n", "\n", "with exp.sweep(uid=\"single_sweep\", parameter=sweep_parameter_1):\n", - " pass\n" + " pass" ] }, { @@ -509,7 +533,7 @@ "\n", "with exp.sweep(uid=\"sweep_first_d\", parameter=sweep_parameter_1):\n", " with exp.sweep(uid=\"sweep_second_d\", parameter=sweep_parameter_2):\n", - " pass\n" + " pass" ] }, { @@ -533,7 +557,7 @@ ")\n", "\n", "with exp.sweep(uid=\"multi_sweep\", parameter=[sweep_parameter_1, sweep_parameter_2]):\n", - " pass\n" + " pass" ] }, { @@ -561,7 +585,7 @@ " uid=\"acquire_loop_nt\", count=10, averaging_mode=AveragingMode.SEQUENTIAL\n", "):\n", " pass\n", - "# TODO: parameter argument?\n" + "# TODO: parameter argument?" 
] }, { @@ -570,7 +594,7 @@ "id": "fd8bb93d", "metadata": {}, "source": [ - "## Averaging Acquire Loop in real time" + "## Averaging Acquire Loop in real time" ] }, { @@ -593,8 +617,7 @@ " repetition_mode=RepetitionMode.CONSTANT,\n", " repetition_time=1e-06,\n", "):\n", - " pass\n", - "# TODO: parameter argument?\n" + " pass" ] }, { @@ -603,7 +626,7 @@ "id": "eef26c14", "metadata": {}, "source": [ - "# DSL Operations in near time" + "# Operations in near time" ] }, { @@ -627,7 +650,7 @@ }, "outputs": [], "source": [ - "# exp.call()\n" + "# exp.call()" ] }, { @@ -651,7 +674,7 @@ }, "outputs": [], "source": [ - "# exp.set_node(path='', value=value)\n" + "# exp.set_node(path='', value=value)" ] }, { @@ -660,7 +683,7 @@ "id": "b05e9379", "metadata": {}, "source": [ - "# DSL Operations in real time" + "# Operations in real time" ] }, { @@ -669,7 +692,7 @@ "id": "0888e471", "metadata": {}, "source": [ - "## Play Pulse" + "## Play Pulses" ] }, { @@ -688,7 +711,7 @@ " exp.play(\n", " signal=\"q0_drive\",\n", " pulse=pulse_library.gaussian(uid=\"x90\", length=100e-9, amplitude=1.0),\n", - " )\n" + " )" ] }, { @@ -697,7 +720,7 @@ "id": "bfdea064", "metadata": {}, "source": [ - "## Acquire Signal" + "## Acquire Signals" ] }, { @@ -728,7 +751,7 @@ " signal=\"q1_acquire\", handle=\"handle3\", kernel=qubit1_kernel\n", " ) # Add Acquire() meta-info e.g. 
\"kernel\" to the handle?\n", "\n", - "# my_results = results.get(result_buffer=\"handle\")\n" + "# my_results = results.get(result_buffer=\"handle\")" ] }, { @@ -753,7 +776,7 @@ "outputs": [], "source": [ "with exp.section(uid=\"delay\"):\n", - " exp.delay(signal=\"q0_drive\", time=1e-03)\n" + " exp.delay(signal=\"q0_drive\", time=1e-03)" ] }, { @@ -778,7 +801,7 @@ "outputs": [], "source": [ "with exp.section(uid=\"reserve\"):\n", - " exp.reserve(\"q0_drive\")\n" + " exp.reserve(\"q0_drive\")" ] }, { diff --git a/examples/00_reference/03_section_tutorial.ipynb b/examples/00_reference/03_section_tutorial.ipynb index 67eadfb..21ae541 100644 --- a/examples/00_reference/03_section_tutorial.ipynb +++ b/examples/00_reference/03_section_tutorial.ipynb @@ -5,15 +5,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# LabOne Q Sections" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this notebook we'll build up the concept of Sections, following along with the Section chapter in the LabOne Q Manual. In the first example, we'll go step by step through each part of defining and running your experiment. In the subsequent examples, we'll focus on the section differences themselves.\n", + "# Section Tutorial\n", + "\n", + "In this notebook you'll build up experiments with the LabOne Q concept of Sections, following along with the Section chapter in the Manual. In the first example, you'll go step by step through each part of defining and running your experiment. 
In the subsequent examples, you'll focus on the differences between the sections themselves.\n", "\n", "By the end of this notebook, you will have constructed a Ramsey sequence, and you'll see how you can control the timing of your experiment by manipulating sections, their properties, and their contents.\n", "\n", @@ -48,7 +42,7 @@ ")\n", "from laboneq.contrib.example_helpers.descriptors.shfsg_shfqa_pqsc import (\n", " descriptor_shfsg_shfqa_pqsc,\n", - ")\n" + ")" ] }, { @@ -80,7 +74,7 @@ " server_host=\"my_ip_address\", # ip address of the LabOne dataserver used to communicate with the instruments\n", " server_port=\"8004\", # port number of the dataserver - default is 8004\n", " setup_name=\"my_QCCS_setup\", # setup name\n", - ")\n" + ")" ] }, { @@ -97,7 +91,7 @@ "metadata": {}, "outputs": [], "source": [ - "device_setup.list_calibratables()\n" + "device_setup.list_calibratables()" ] }, { @@ -145,7 +139,7 @@ " oscillator=drive_q0_if,\n", " local_oscillator=drive_q0_q1_lo,\n", " range=10,\n", - " )\n" + " )" ] }, { @@ -162,7 +156,7 @@ "metadata": {}, "outputs": [], "source": [ - "calibrate_devices(device_setup)\n" + "calibrate_devices(device_setup)" ] }, { @@ -179,7 +173,7 @@ "metadata": {}, "outputs": [], "source": [ - "device_setup.list_calibratables()\n" + "device_setup.list_calibratables()" ] }, { @@ -220,7 +214,7 @@ " # Left-aligned section of fixed length\n", " with exp.section(uid=\"excitation\", length=2e-6, alignment=SectionAlignment.LEFT):\n", " # Section contents\n", - " exp.play(signal=\"drive\", pulse=x90)\n" + " exp.play(signal=\"drive\", pulse=x90)" ] }, { @@ -251,7 +245,7 @@ "}\n", "\n", "# set signal map\n", - "exp.set_signal_map(map_q0_drive)\n" + "exp.set_signal_map(map_q0_drive)" ] }, { @@ -268,7 +262,7 @@ "metadata": {}, "outputs": [], "source": [ - "exp.get_signal_map()\n" + "exp.get_signal_map()" ] }, { @@ -297,7 +291,7 @@ "source": [ "session = Session(device_setup=device_setup)\n", "session.connect(do_emulation=True)\n", - "compiled_exp 
= session.compile(exp)\n" + "compiled_exp = session.compile(exp)" ] }, { @@ -322,7 +316,7 @@ "metadata": {}, "outputs": [], "source": [ - "show_pulse_sheet(\"1_Section_Intro\", compiled_exp)\n" + "show_pulse_sheet(\"1_Section_Intro\", compiled_exp)" ] }, { @@ -347,7 +341,7 @@ "metadata": {}, "outputs": [], "source": [ - "print(compiled_exp.src[0][\"text\"])\n" + "print(compiled_exp.src[0][\"text\"])" ] }, { @@ -372,7 +366,7 @@ "metadata": {}, "outputs": [], "source": [ - "my_results = session.run(compiled_exp)\n" + "my_results = session.run(compiled_exp)" ] }, { @@ -426,7 +420,7 @@ " # Left-aligned section of fixed length\n", " with exp.section(uid=\"excitation\", length=2e-6, alignment=SectionAlignment.RIGHT):\n", " # Section contents\n", - " exp.play(signal=\"drive\", pulse=x90)\n" + " exp.play(signal=\"drive\", pulse=x90)" ] }, { @@ -445,7 +439,7 @@ "my_results = session.run()\n", "\n", "# generate the pulse sheet\n", - "show_pulse_sheet(\"2_Right_Alignment\", session.compiled_experiment)\n" + "show_pulse_sheet(\"2_Right_Alignment\", session.compiled_experiment)" ] }, { @@ -483,7 +477,7 @@ " # Left Aligned section of fixed length\n", " with exp.section(uid=\"excitation\", alignment=SectionAlignment.RIGHT):\n", " # Section contents\n", - " exp.play(signal=\"drive\", pulse=x90)\n" + " exp.play(signal=\"drive\", pulse=x90)" ] }, { @@ -502,7 +496,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"3_No_Specified_Length\", session.compiled_experiment)\n" + "show_pulse_sheet(\"3_No_Specified_Length\", session.compiled_experiment)" ] }, { @@ -542,7 +536,7 @@ " # Section contents\n", " exp.play(signal=\"drive\", pulse=x90)\n", " exp.delay(signal=\"drive\", time=100e-9)\n", - " exp.play(signal=\"drive\", pulse=x90)\n" + " exp.play(signal=\"drive\", pulse=x90)" ] }, { @@ -561,7 +555,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"4_Signal_Delay\", session.compiled_experiment)\n" + 
"show_pulse_sheet(\"4_Signal_Delay\", session.compiled_experiment)" ] }, { @@ -605,7 +599,7 @@ " exp.play(signal=\"drive\", pulse=x90)\n", " exp.play(signal=\"drive1\", pulse=x180)\n", " exp.delay(signal=\"drive1\", time=50e-9)\n", - " exp.play(signal=\"drive1\", pulse=x90)\n" + " exp.play(signal=\"drive1\", pulse=x90)" ] }, { @@ -618,7 +612,7 @@ "map_q0_q1_drive = {\n", " \"drive\": device_setup.logical_signal_groups[\"q0\"].logical_signals[\"drive_line\"],\n", " \"drive1\": device_setup.logical_signal_groups[\"q1\"].logical_signals[\"drive_line\"],\n", - "}\n" + "}" ] }, { @@ -643,7 +637,7 @@ " oscillator=drive_q1_if,\n", " local_oscillator=drive_q0_q1_lo,\n", " range=10,\n", - " )\n" + " )" ] }, { @@ -653,7 +647,7 @@ "outputs": [], "source": [ "# apply the new calibration\n", - "calibrate_devices_drive1(device_setup)\n" + "calibrate_devices_drive1(device_setup)" ] }, { @@ -662,7 +656,7 @@ "metadata": {}, "outputs": [], "source": [ - "device_setup.list_calibratables()\n" + "device_setup.list_calibratables()" ] }, { @@ -681,7 +675,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"5_Section_Two_Lines\", session.compiled_experiment)\n" + "show_pulse_sheet(\"5_Section_Two_Lines\", session.compiled_experiment)" ] }, { @@ -719,7 +713,7 @@ " # Section contents\n", " exp.play(signal=\"drive1\", pulse=x180)\n", " exp.delay(signal=\"drive1\", time=50e-9)\n", - " exp.play(signal=\"drive1\", pulse=x90)\n" + " exp.play(signal=\"drive1\", pulse=x90)" ] }, { @@ -738,7 +732,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"6_Two_Sections\", session.compiled_experiment)\n" + "show_pulse_sheet(\"6_Two_Sections\", session.compiled_experiment)" ] }, { @@ -780,7 +774,7 @@ " # Section contents\n", " exp.play(signal=\"drive1\", pulse=x180)\n", " exp.delay(signal=\"drive1\", time=50e-9)\n", - " exp.play(signal=\"drive1\", pulse=x90)\n" + " exp.play(signal=\"drive1\", pulse=x90)" ] }, { @@ -799,7 +793,7 @@ 
"my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"7_Nested_Sections\", session.compiled_experiment)\n" + "show_pulse_sheet(\"7_Nested_Sections\", session.compiled_experiment)" ] }, { @@ -845,7 +839,7 @@ " exp.delay(signal=\"drive1\", time=50e-9)\n", " exp.play(signal=\"drive1\", pulse=x90)\n", " exp.add(section=excitation1)\n", - " exp.add(section=excitation1)\n" + " exp.add(section=excitation1)" ] }, { @@ -864,7 +858,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"8_Reusing_Sections\", session.compiled_experiment)\n" + "show_pulse_sheet(\"8_Reusing_Sections\", session.compiled_experiment)" ] }, { @@ -907,7 +901,7 @@ " # Left-aligned section with 50 ns length\n", " exp.add(section=my_section)\n", " exp.add(section=my_section)\n", - " exp.add(section=my_section)\n" + " exp.add(section=my_section)" ] }, { @@ -926,7 +920,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"9_Reusing_Sections_Alternative\", session.compiled_experiment)\n" + "show_pulse_sheet(\"9_Reusing_Sections_Alternative\", session.compiled_experiment)" ] }, { @@ -975,7 +969,7 @@ " exp.delay(signal=\"drive1\", time=50e-9)\n", " exp.play(signal=\"drive1\", pulse=x90)\n", " exp.add(section=excitation1)\n", - " exp.add(section=excitation1)\n" + " exp.add(section=excitation1)" ] }, { @@ -994,7 +988,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"10_Play_After\", session.compiled_experiment)\n" + "show_pulse_sheet(\"10_Play_After\", session.compiled_experiment)" ] }, { @@ -1040,7 +1034,7 @@ " exp.play(signal=\"drive1\", pulse=x180)\n", " exp.delay(signal=\"drive1\", time=50e-9)\n", " exp.play(signal=\"drive1\", pulse=x90)\n", - " exp.add(section=excitation1)\n" + " exp.add(section=excitation1)" ] }, { @@ -1059,7 +1053,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"11_Reserving_Signals\", 
session.compiled_experiment)\n" + "show_pulse_sheet(\"11_Reserving_Signals\", session.compiled_experiment)" ] }, { @@ -1108,7 +1102,7 @@ "\n", "time_sweep = LinearSweepParameter(\n", " uid=\"time_sweep_param\", start=start_delay, stop=stop_delay, count=n_steps\n", - ")\n" + ")" ] }, { @@ -1172,7 +1166,7 @@ " )\n", " with exp_ramsey.section(uid=\"delay\", length=1e-6):\n", " # relax time after readout - for qubit relaxation to groundstate and signal processing\n", - " exp_ramsey.reserve(signal=\"measure\")\n" + " exp_ramsey.reserve(signal=\"measure\")" ] }, { @@ -1202,7 +1196,7 @@ " \"drive\": device_setup.logical_signal_groups[\"q0\"].logical_signals[\"drive_line\"],\n", " \"measure\": device_setup.logical_signal_groups[\"q0\"].logical_signals[\"measure_line\"],\n", " \"acquire\": device_setup.logical_signal_groups[\"q0\"].logical_signals[\"acquire_line\"],\n", - "}\n" + "}" ] }, { @@ -1251,7 +1245,7 @@ " port_delay=0,\n", " local_oscillator=readout_q0_lo,\n", " range=10,\n", - " )\n" + " )" ] }, { @@ -1260,7 +1254,7 @@ "metadata": {}, "outputs": [], "source": [ - "calibrate_devices_readout(device_setup)\n" + "calibrate_devices_readout(device_setup)" ] }, { @@ -1269,7 +1263,7 @@ "metadata": {}, "outputs": [], "source": [ - "device_setup.list_calibratables()\n" + "device_setup.list_calibratables()" ] }, { @@ -1288,7 +1282,7 @@ "my_results = session.run()\n", "\n", "# show pulse sheet\n", - "show_pulse_sheet(\"12_Ramsey\", session.compiled_experiment)\n" + "show_pulse_sheet(\"12_Ramsey\", session.compiled_experiment)" ] }, { diff --git a/examples/00_reference/05_pulse_inspector_plotter.ipynb b/examples/00_reference/05_pulse_inspector_plotter.ipynb index 3ac6cec..9aa5f16 100644 --- a/examples/00_reference/05_pulse_inspector_plotter.ipynb +++ b/examples/00_reference/05_pulse_inspector_plotter.ipynb @@ -1,5 +1,12 @@ { "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Pulse Inspector and Bloch Simulator" + ] + }, { "cell_type": "code", 
"execution_count": null, @@ -13,7 +20,7 @@ "from laboneq.dsl.experiment import pulse_library\n", "from laboneq.contrib.bloch_simulator_pulse_plotter.inspector.update_inspect import (\n", " pulse_inspector,\n", - ")\n" + ")" ] }, { @@ -34,7 +41,7 @@ "my_pulse = pulse_library.gaussian(uid=\"my_pulse\", length=100e-9, amplitude=1.0)\n", "\n", "## update pulse with spectral window and flip angle and subsequently, peak amplitude\n", - "my_pulse = pulse_update(my_pulse, spectral_window=200e6, flip_angle=180)\n" + "my_pulse = pulse_update(my_pulse, spectral_window=200e6, flip_angle=180)" ] }, { @@ -44,7 +51,7 @@ "outputs": [], "source": [ "## plot IQ quadratures - is default (corresponds to output of instruments)\n", - "pulse_inspector(my_pulse)\n" + "pulse_inspector(my_pulse)" ] }, { @@ -54,7 +61,7 @@ "outputs": [], "source": [ "## plot IQ quadratures - is default (corresponds to output of instruments)\n", - "pulse_inspector(my_pulse, iq=True)\n" + "pulse_inspector(my_pulse, iq=True)" ] }, { @@ -64,7 +71,7 @@ "outputs": [], "source": [ "## plot amplitude and phase quadratures\n", - "pulse_inspector(my_pulse, amp_phi=True)\n" + "pulse_inspector(my_pulse, amp_phi=True)" ] }, { @@ -74,7 +81,7 @@ "outputs": [], "source": [ "# plot frequency response / Bloch simulation of pulse\n", - "pulse_inspector(my_pulse, response=True)\n" + "pulse_inspector(my_pulse, response=True)" ] }, { @@ -96,7 +103,7 @@ " uid=\"my_pulse\", length=100e-9, amplitude=1.0, width=80e-9\n", ")\n", "\n", - "my_pulse = pulse_update(my_pulse, spectral_window=200e6, flip_angle=180)\n" + "my_pulse = pulse_update(my_pulse, spectral_window=200e6, flip_angle=180)" ] }, { @@ -106,7 +113,7 @@ "outputs": [], "source": [ "## nothing has been provided\n", - "pulse_inspector(my_pulse)\n" + "pulse_inspector(my_pulse)" ] }, { @@ -116,7 +123,7 @@ "outputs": [], "source": [ "## plot IQ quadratures - is default (corresponds to output of instruments)\n", - "pulse_inspector(my_pulse, iq=True)\n" + "pulse_inspector(my_pulse, 
iq=True)" ] }, { @@ -126,7 +133,7 @@ "outputs": [], "source": [ "## plot amplitude and phase quadratures\n", - "pulse_inspector(my_pulse, amp_phi=True)\n" + "pulse_inspector(my_pulse, amp_phi=True)" ] }, { @@ -136,7 +143,7 @@ "outputs": [], "source": [ "## plot frequency response / Bloch simulation of pulse\n", - "pulse_inspector(my_pulse, response=True)\n" + "pulse_inspector(my_pulse, response=True)" ] }, { @@ -161,7 +168,7 @@ " spectral_window=200e6,\n", " flip_angle=180,\n", " pulse_parameters=my_pulse.pulse_parameters,\n", - ")\n" + ")" ] }, { @@ -171,7 +178,7 @@ "outputs": [], "source": [ "## nothing has been provided\n", - "pulse_inspector(my_pulse)\n" + "pulse_inspector(my_pulse)" ] }, { @@ -181,7 +188,7 @@ "outputs": [], "source": [ "## plot IQ quadratures - is default (corresponds to output of instruments)\n", - "pulse_inspector(my_pulse, iq=True)\n" + "pulse_inspector(my_pulse, iq=True)" ] }, { @@ -191,7 +198,7 @@ "outputs": [], "source": [ "## plot amplitude and phase quadratures\n", - "pulse_inspector(my_pulse, amp_phi=True)\n" + "pulse_inspector(my_pulse, amp_phi=True)" ] }, { @@ -203,7 +210,7 @@ "outputs": [], "source": [ "## plot I and Q and amplitude and phase quadratures\n", - "pulse_inspector(my_pulse, amp_phi=True, iq=True)\n" + "pulse_inspector(my_pulse, amp_phi=True, iq=True)" ] }, { @@ -213,7 +220,7 @@ "outputs": [], "source": [ "## plot frequency response / Bloch simulation of pulse\n", - "pulse_inspector(my_pulse, response=True)\n" + "pulse_inspector(my_pulse, response=True)" ] }, { diff --git a/examples/00_reference/07_waveform_replacement.ipynb b/examples/00_reference/07_waveform_replacement.ipynb index 9c06ee6..289073e 100644 --- a/examples/00_reference/07_waveform_replacement.ipynb +++ b/examples/00_reference/07_waveform_replacement.ipynb @@ -5,7 +5,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Waveform Replacement Demo\n", + "# Waveform Replacement\n", "\n", "In this notebook, you'll learn how to use a [callback 
function](https://docs.zhinst.com/labone_q_user_manual/concepts/callback_functions.html) in a [near-time sweep](https://docs.zhinst.com/labone_q_user_manual/concepts/averaging_sweeping.html#labone_q.func_concepts.experiment.averaging_sweeping.real_near_time) to perform a waveform replacement experiment. This kind of functionality can be adapted to your own experiment, e.g., VQE or optimal control." ] @@ -33,7 +33,7 @@ "\n", "# Other imports\n", "from pathlib import Path\n", - "import numpy as np\n" + "import numpy as np" ] }, { @@ -64,7 +64,7 @@ "lsg = {\n", " qubit_name: device_setup.logical_signal_groups[qubit_name].logical_signals\n", " for qubit_name in device_setup.logical_signal_groups.keys()\n", - "}\n" + "}" ] }, { @@ -86,7 +86,7 @@ "\n", "# create and connect to a session\n", "session = Session(device_setup=device_setup)\n", - "session.connect(do_emulation=emulate)\n" + "session.connect(do_emulation=emulate)" ] }, { @@ -147,7 +147,7 @@ " return res\n", "\n", "\n", - "p_flattop = flattop_gaussian(uid=\"flattop\", length=pulse_time, amplitude=1)\n" + "p_flattop = flattop_gaussian(uid=\"flattop\", length=pulse_time, amplitude=1)" ] }, { @@ -184,7 +184,7 @@ " print(f\"{idx} Third replacement: p_drag replaced by p_const\")\n", " return\n", " print(idx)\n", - " return\n" + " return" ] }, { @@ -244,7 +244,7 @@ " exp.delay(signal=\"drive\", time=0.5)\n", "\n", " exp.call(user_func_to_replace_pulse, idx=instance_idx)\n", - " return exp\n" + " return exp" ] }, { @@ -265,7 +265,7 @@ "session.register_user_function(user_func_to_replace_pulse)\n", "\n", "# compile\n", - "comp_waveform_replacement = session.compile(exp_waveform_exchange(30))\n" + "comp_waveform_replacement = session.compile(exp_waveform_exchange(30))" ] }, { @@ -284,7 +284,7 @@ "metadata": {}, "outputs": [], "source": [ - "plot_simulation(comp_waveform_replacement, 0, 1e-6)\n" + "plot_simulation(comp_waveform_replacement, 0, 1e-6)" ] }, { @@ -302,7 +302,7 @@ "outputs": [], "source": [ 
"Path(\"Pulse_sheets\").mkdir(parents=True, exist_ok=True)\n", - "show_pulse_sheet(\"Pulse_sheets/waveform_replacement\", comp_waveform_replacement)\n" + "show_pulse_sheet(\"Pulse_sheets/waveform_replacement\", comp_waveform_replacement)" ] }, { @@ -320,7 +320,7 @@ "outputs": [], "source": [ "# run the compiled experiemnt\n", - "waveform_replacement_results = session.run(comp_waveform_replacement)\n" + "waveform_replacement_results = session.run(comp_waveform_replacement)" ] } ], diff --git a/examples/00_reference/08_node_sweeping.ipynb b/examples/00_reference/08_node_sweeping.ipynb index 8162eef..a5e5e02 100644 --- a/examples/00_reference/08_node_sweeping.ipynb +++ b/examples/00_reference/08_node_sweeping.ipynb @@ -8,7 +8,7 @@ "code_folding": [] }, "source": [ - "# Node set in Near-Time Loop" + "# Setting nodes in a near-Time Loop" ] }, { @@ -28,7 +28,7 @@ "outputs": [], "source": [ "# convenience import for all LabOne Q software functionality\n", - "from laboneq.simple import *\n" + "from laboneq.simple import *" ] }, { @@ -80,7 +80,7 @@ " device_pqsc:\n", " - to: device_hdawg\n", " port: ZSYNCS/0\n", - "\"\"\"\n" + "\"\"\"" ] }, { @@ -169,7 +169,7 @@ " \"acquire_line\"\n", " ].oscillator = Oscillator(\n", " uid=\"acquire_q1_osc\", frequency=1e8, modulation_type=ModulationType.SOFTWARE\n", - " )\n" + " )" ] }, { @@ -194,7 +194,7 @@ " server_port=\"8004\",\n", " setup_name=\"ZI_QCCS\",\n", ")\n", - "calibrate_devices(device_setup)\n" + "calibrate_devices(device_setup)" ] }, { @@ -282,7 +282,7 @@ " exp.play(\n", " signal=\"q0_measure\",\n", " pulse=pulse_library.const(length=1e-6, amplitude=0.0),\n", - " ) # KNOWN ISSUE: Need to wait for 1us to prevent QA holdoff error\n" + " ) # KNOWN ISSUE: Need to wait for 1us to prevent QA holdoff error" ] }, { @@ -305,7 +305,7 @@ "source": [ "session = Session(device_setup)\n", "session.connect(do_emulation=True)\n", - "my_results = session.run(exp)\n" + "my_results = session.run(exp)" ] }, { @@ -324,7 +324,7 @@ "metadata": 
{}, "outputs": [], "source": [ - "# Nothing specific to the nodes set functionality\n" + "# Nothing specific to the nodes set functionality" ] } ], diff --git a/examples/00_reference/10_database_interface.ipynb b/examples/00_reference/10_database_interface.ipynb index c00bab5..bf6722a 100755 --- a/examples/00_reference/10_database_interface.ipynb +++ b/examples/00_reference/10_database_interface.ipynb @@ -64,7 +64,7 @@ "metadata": {}, "outputs": [], "source": [ - "my_db = L1QDatabase()\n" + "my_db = DataStore()\n" ] }, { @@ -83,7 +83,7 @@ "source": [ "custom_db_path = \"laboneq_data/custom_database.db\"\n", "\n", - "my_custom_db = L1QDatabase(custom_db_path)\n" + "my_custom_db = DataStore(custom_db_path)\n" ] }, { diff --git a/examples/01_qubit_characterization/04_propagation_delay.ipynb b/examples/01_qubit_characterization/04_propagation_delay.ipynb index bba4ac4..f7aebe6 100644 --- a/examples/01_qubit_characterization/04_propagation_delay.ipynb +++ b/examples/01_qubit_characterization/04_propagation_delay.ipynb @@ -87,7 +87,7 @@ "outputs": [], "source": [ "delay_sweep = LinearSweepParameter(\n", - " uid=\"delay_sweep_param\", start=0, stop=1.7e-6, count=21\n", + " uid=\"delay_sweep_param\", start=0, stop=1.0e-6, count=21\n", ")\n", "\n", "# define number of averages\n", @@ -102,28 +102,6 @@ ")\n" ] }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now you'll create a reference to the SHFQA channel so that the integration delay node of the instrument can be swept." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "shfqa_address = []\n", - "\n", - "for i in range(len(device_setup.instruments)):\n", - " if \"QA\" in str(device_setup.instruments[i]):\n", - " # print(device_setup.instruments[i].address)\n", - " shfqa_address.append(device_setup.instruments[i].address)\n" - ] - }, { "attachments": {}, "cell_type": "markdown", @@ -154,13 +132,6 @@ " ## define experimental sequence\n", " # outer loop - vary drive frequency\n", " with exp_prop_delay.sweep(uid=\"del_sweep\", parameter=delay_sweep):\n", - " # WORKAROUND: node sweeping is currently only possible when connected to an instrument\n", - " if not emulate:\n", - " exp_prop_delay.set_node(\n", - " path=f\"/{shfqa_address[0]}/qachannels/0/readout/integration/delay\",\n", - " value=delay_sweep,\n", - " )\n", - "\n", " with exp_prop_delay.acquire_loop_rt(\n", " uid=\"shots\",\n", " count=2**num_averages,\n", @@ -178,7 +149,11 @@ " # holdoff time after signal acquisition - minimum 1us required for data processing on UHFQA\n", " exp_prop_delay.delay(signal=\"measure\", time=1e-6)\n", "\n", - " return exp_prop_delay\n" + " cal = Calibration()\n", + " cal[\"acquire\"] = SignalCalibration(port_delay=delay_sweep)\n", + " exp_prop_delay.set_calibration(cal)\n", + "\n", + " return exp_prop_delay" ] }, { @@ -284,7 +259,7 @@ "kernelspec": { "display_name": "develop", "language": "python", - "name": "develop" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -296,7 +271,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.11.4" }, "orig_nbformat": 4 }, diff --git a/examples/02_advanced_qubit_experiments/00_active_qubit_reset_shfsg_shfqa_shfqc.ipynb b/examples/02_advanced_qubit_experiments/00_active_qubit_reset_shfsg_shfqa_shfqc.ipynb index e9095ae..458a0da 100644 --- 
a/examples/02_advanced_qubit_experiments/00_active_qubit_reset_shfsg_shfqa_shfqc.ipynb +++ b/examples/02_advanced_qubit_experiments/00_active_qubit_reset_shfsg_shfqa_shfqc.ipynb @@ -8,7 +8,7 @@ "tags": [] }, "source": [ - "# Active Qubit Reset Demonstration \n", + "# Active Qubit Reset \n", "\n", "In this notebook, we demonstrate how to execute active qubit reset, i.e. active feedback based on real-time measurement of the qubit state. \n", "We require either a SHFQC instrument for this notebook or a combination of SHFSG and SHFQA connected via a PQSC. \n", @@ -61,7 +61,7 @@ "from laboneq.contrib.example_helpers.descriptors.shfqc import descriptor_shfqc\n", "from laboneq.contrib.example_helpers.descriptors.shfsg_shfqa_pqsc import (\n", " descriptor_shfsg_shfqa_pqsc,\n", - ")\n" + ")" ] }, { @@ -70,7 +70,7 @@ "metadata": {}, "outputs": [], "source": [ - "use_emulation = True\n" + "use_emulation = True" ] }, { @@ -83,7 +83,7 @@ " \"SHFSG_FORCE_COMMAND_TABLE\": True,\n", " \"SHFSG_MIN_PLAYWAVE_HINT\": 32,\n", " \"SHFSG_MIN_PLAYZERO_HINT\": 32,\n", - "}\n" + "}" ] }, { @@ -127,7 +127,7 @@ " server_host=\"my_ip_address\", # ip address of the LabOne dataserver used to communicate with the instruments\n", " server_port=\"8004\", # port number of the dataserver - default is 8004\n", " setup_name=\"QC_standalone\", # setup name\n", - ")\n" + ")" ] }, { @@ -162,7 +162,7 @@ " \"readout_range_in\": 10,\n", " \"drive_lo_frequency\": 1.0e9, # drive LO frequencies, one center frequency per two channels\n", " \"drive_range\": 10,\n", - "}\n" + "}" ] }, { @@ -174,7 +174,7 @@ "source": [ "# define qubit object, containing all relevant information for the tuneup experiments\n", "my_parameters = QubitParameters(base_qubit_parameters)\n", - "my_qubit = Qubit(0, base_qubit_parameters)\n" + "my_qubit = Qubit(0, base_qubit_parameters)" ] }, { @@ -234,7 +234,7 @@ " ),\n", " range=my_qubit.parameters.readout_range_in,\n", " port_delay=my_qubit.parameters.readout_integration_delay,\n", - 
")\n" + ")" ] }, { @@ -253,7 +253,7 @@ "my_setup.set_calibration(my_base_calibration)\n", "\n", "q0 = my_setup.logical_signal_groups[\"q0\"].logical_signals\n", - "q1 = my_setup.logical_signal_groups[\"q1\"].logical_signals\n" + "q1 = my_setup.logical_signal_groups[\"q1\"].logical_signals" ] }, { @@ -265,7 +265,7 @@ "source": [ "# create and connect to a LabOne Q session\n", "my_session = Session(device_setup=my_setup)\n", - "my_session.connect(do_emulation=use_emulation)\n" + "my_session.connect(do_emulation=use_emulation)" ] }, { @@ -313,7 +313,7 @@ " my_qubit.parameters.readout_amplitude,\n", " pulse_phase,\n", " )\n", - ")\n" + ")" ] }, { @@ -344,7 +344,7 @@ "# plt.plot(samples_kernel.real, samples_kernel.imag)\n", "plt.figure()\n", "plt.plot(samples_kernel.real)\n", - "plt.plot(samples_kernel.imag)\n" + "plt.plot(samples_kernel.imag)" ] }, { @@ -403,7 +403,7 @@ "if do_rotation:\n", " print(f\"Using threshold = {my_threshold:e} and rotation angle: {rotation_angle:e}\")\n", "else:\n", - " print(f\"Using threshold={my_threshold:e}\")\n" + " print(f\"Using threshold={my_threshold:e}\")" ] }, { @@ -417,7 +417,7 @@ " samples_kernel * np.exp(1j * rotation_angle)\n", ")\n", "\n", - "q0[\"acquire_line\"].calibration.threshold = my_threshold\n" + "q0[\"acquire_line\"].calibration.threshold = my_threshold" ] }, { @@ -469,7 +469,7 @@ " \"r\",\n", ")\n", "\n", - "print(f\"Using threshold={threshold_rot:e}\")\n" + "print(f\"Using threshold={threshold_rot:e}\")" ] }, { @@ -501,7 +501,7 @@ "s1 = r.acquired_results[\"data1\"].data\n", "\n", "plt.plot(s0.real, \".b\")\n", - "plt.plot(s1.real, \".r\")\n" + "plt.plot(s1.real, \".r\")" ] }, { @@ -607,7 +607,7 @@ " with exp.section():\n", " exp.delay(signal=\"drive\", time=pattern_delay)\n", "\n", - " return exp\n" + " return exp" ] }, { @@ -621,7 +621,7 @@ " \"measure0\": q0[\"measure_line\"],\n", " \"measure1\": q1[\"measure_line\"],\n", " \"acquire\": q0[\"acquire_line\"],\n", - "}\n" + "}" ] }, { @@ -643,7 +643,7 @@ " 
acquire_delay=150e-9,\n", " acquisition_type=AcquisitionType.INTEGRATION,\n", ")\n", - "my_feedback_exp.set_signal_map(my_signal_map)\n" + "my_feedback_exp.set_signal_map(my_signal_map)" ] }, { @@ -653,7 +653,7 @@ "outputs": [], "source": [ "# compile experiment\n", - "my_compiled_exp = my_session.compile(my_feedback_exp)\n" + "my_compiled_exp = my_session.compile(my_feedback_exp)" ] }, { @@ -663,7 +663,7 @@ "outputs": [], "source": [ "# run experiment and get the results\n", - "my_results = my_session.run(my_compiled_exp)\n" + "my_results = my_session.run(my_compiled_exp)" ] }, { @@ -674,7 +674,7 @@ "source": [ "# when executed in integration mode, IQ data of each state readout is still available\n", "my_data = my_results.get_data(\"qubit_state\")\n", - "my_data\n" + "my_data" ] }, { @@ -684,7 +684,7 @@ "outputs": [], "source": [ "## Look at th pulse sheet - feedback is characterised by two simultaneous sections\n", - "# show_pulse_sheet(\"feedback_experiment\", my_compiled_exp)\n" + "# show_pulse_sheet(\"feedback_experiment\", my_compiled_exp)" ] }, { @@ -694,7 +694,7 @@ "outputs": [], "source": [ "## have a look at the sequencer code for the QA unit, making the measurements\n", - "print(my_compiled_exp.src[0][\"text\"])\n" + "print(my_compiled_exp.src[0][\"text\"])" ] }, { @@ -704,7 +704,7 @@ "outputs": [], "source": [ "## have a look at the sequencer code for the SG unit, playing the feedback pulses\n", - "print(my_compiled_exp.src[1][\"text\"])\n" + "print(my_compiled_exp.src[1][\"text\"])" ] }, { diff --git a/examples/02_advanced_qubit_experiments/01_randomized_benchmarking.ipynb b/examples/02_advanced_qubit_experiments/01_randomized_benchmarking.ipynb index 4e9045e..0e94916 100644 --- a/examples/02_advanced_qubit_experiments/01_randomized_benchmarking.ipynb +++ b/examples/02_advanced_qubit_experiments/01_randomized_benchmarking.ipynb @@ -6,7 +6,7 @@ "id": "dd656230", "metadata": {}, "source": [ - "# Randomized Benchmarking with the LabOne Q Software\n", + "# 
Randomized Benchmarking\n", "\n", "An advanced use case example - Randomized benchmarking using the Clifford group\n", "\n", @@ -58,7 +58,7 @@ " basic_pulse_set,\n", " clifford_parametrized,\n", " generate_play_rb_pulses,\n", - ")\n" + ")" ] }, { @@ -71,7 +71,7 @@ "## hardcoded properties:\n", "GATE_LENGTH = 24e-9 # single Cliffordgate length\n", "SAMPLE_RATE = 2.0e9 # sample rate of the AWG -- would be 2.4e-9 only when pulses are played with HDAWG and readout done with UHFQA\n", - "SIGMA = 1 / 3 # shape - gaussian with width = 1/3 length\n" + "SIGMA = 1 / 3 # shape - gaussian with width = 1/3 length" ] }, { @@ -141,7 +141,7 @@ " - to: device_shfsg\n", " port: ZSYNCS/6 \n", " - internal_clock_signal\n", - "\"\"\"\n" + "\"\"\"" ] }, { @@ -186,7 +186,7 @@ "lo_settings = {\n", " \"shfqa_lo\": 6.0e9, # SHFQA LO Frequency\n", " \"shfsg_lo\": 5.0e9, # SHFSG LO Frequencies, one center frequency per two channels\n", - "}\n" + "}" ] }, { @@ -263,7 +263,7 @@ " threshold=0.5,\n", " )\n", "\n", - " return my_calibration\n" + " return my_calibration" ] }, { @@ -312,7 +312,7 @@ " \"drive\": \"/logical_signal_groups/q0/drive_line\",\n", " \"measure\": \"/logical_signal_groups/q0/measure_line\",\n", " \"acquire\": \"/logical_signal_groups/q0/acquire_line\",\n", - "}\n" + "}" ] }, { @@ -334,7 +334,7 @@ "emulate = True # perform experiments in emulation mode only?\n", "\n", "my_session = Session(device_setup=my_setup)\n", - "my_session.connect(do_emulation=emulate)\n" + "my_session.connect(do_emulation=emulate)" ] }, { @@ -378,7 +378,7 @@ "# integration weights for qubit measurement\n", "readout_weighting_function = pulse_library.const(\n", " uid=\"readout_weighting_function\", length=qubit_parameters[\"ro_len\"], amplitude=1.0\n", - ")\n" + ")" ] }, { @@ -408,7 +408,7 @@ " sigma=SIGMA,\n", " sample_rate=SAMPLE_RATE,\n", ")\n", - "pulse_set = basic_pulse_set(gate_set)\n" + "pulse_set = basic_pulse_set(gate_set)" ] }, { @@ -437,7 +437,7 @@ "\n", "# the maximum sequence duration is 
determined by its length,\n", "# the max number of basic gates in each Clifford gate, and the length of each gate\n", - "max_seq_duration = 2**max_seq_length * 3 * gate_length\n" + "max_seq_duration = 2**max_seq_length * 3 * gate_length" ] }, { @@ -505,7 +505,7 @@ " with exp_rb.section():\n", " exp_rb.reserve(\"drive\")\n", " # relax time after readout - for qubit relaxation to groundstate and signal processing\n", - " exp_rb.delay(signal=\"measure\", time=1e-6)\n" + " exp_rb.delay(signal=\"measure\", time=1e-6)" ] }, { @@ -520,7 +520,7 @@ "\n", "# compile the experiment\n", "compiler_settings = {\"SHFSG_MIN_PLAYWAVE_HINT\": 256}\n", - "compiled_exp_rb = my_session.compile(exp_rb, compiler_settings=compiler_settings)\n" + "compiled_exp_rb = my_session.compile(exp_rb, compiler_settings=compiler_settings)" ] }, { @@ -530,7 +530,7 @@ "metadata": {}, "outputs": [], "source": [ - "my_results = my_session.run(compiled_exp_rb)\n" + "my_results = my_session.run(compiled_exp_rb)" ] }, { @@ -553,7 +553,7 @@ "my_results.get_data(\"acq_2\")\n", "avg_meas = []\n", "for seq_length in seq_lengths:\n", - " avg_meas.append(np.mean(my_results.get_data(f\"acq_{seq_length}\")))\n" + " avg_meas.append(np.mean(my_results.get_data(f\"acq_{seq_length}\")))" ] }, { @@ -567,7 +567,7 @@ "plt.plot(seq_lengths, 1 - np.real(avg_meas))\n", "plt.xlabel(\"Sequence Length\")\n", "plt.ylabel(\"Average Fidelity\")\n", - "plt.show()\n" + "plt.show()" ] } ], diff --git a/examples/03_superconducting_qubits/00_qubit_tuneup_shfsg_shfqa_shfqc.ipynb b/examples/03_superconducting_qubits/00_qubit_tuneup_shfsg_shfqa_shfqc.ipynb index 5f22740..5a60c7c 100644 --- a/examples/03_superconducting_qubits/00_qubit_tuneup_shfsg_shfqa_shfqc.ipynb +++ b/examples/03_superconducting_qubits/00_qubit_tuneup_shfsg_shfqa_shfqc.ipynb @@ -8,7 +8,7 @@ "tags": [] }, "source": [ - "# Tune-up with LabOne Q Software with SHF Instruments\n", + "# Qubit Tune-up with SHF Instruments\n", "\n", "In this notebook we demonstrate qubit 
tuneup with the LabOne Q software, implemented as a sequence of experiments. \n", "\n", @@ -77,7 +77,7 @@ "import time\n", "\n", "import matplotlib.pyplot as plt\n", - "import numpy as np\n" + "import numpy as np" ] }, { @@ -140,7 +140,7 @@ " shfsg_address.append(device_setup.instruments[i].address)\n", " if \"HD\" in str(device_setup.instruments[i]):\n", " # print(device_setup.instruments[i].address)\n", - " hdawg_address.append(device_setup.instruments[i].address)\n" + " hdawg_address.append(device_setup.instruments[i].address)" ] }, { @@ -203,7 +203,7 @@ "\n", "lo_settings = {\n", " k: single_lo_settings() for k in device_setup.logical_signal_groups.keys()\n", - "}\n" + "}" ] }, { @@ -297,7 +297,7 @@ " local_oscillator=drive_lo_dict[logical_signal_group],\n", " range=5,\n", " )\n", - " return calibration\n" + " return calibration" ] }, { @@ -332,7 +332,7 @@ "# define Calibration object based on qubit control and readout parameters\n", "calibration = define_calibration(device_setup, qubit_parameters, lo_settings)\n", "# apply calibration to device setup\n", - "device_setup.set_calibration(calibration)\n" + "device_setup.set_calibration(calibration)" ] }, { @@ -358,7 +358,7 @@ "\n", "# create and connect to a session\n", "session = Session(device_setup=device_setup)\n", - "session.connect(do_emulation=emulate)\n" + "session.connect(do_emulation=emulate)" ] }, { @@ -422,7 +422,7 @@ " start=start_freq + qubit_parameters[qubit][\"ro_freq\"],\n", " stop=stop_freq + qubit_parameters[qubit][\"ro_freq\"],\n", " count=num_points,\n", - " )\n" + " )" ] }, { @@ -483,7 +483,7 @@ " # holdoff time after signal acquisition\n", " exp_spec.reserve(signal=\"measure\")\n", "\n", - " return exp_spec\n" + " return exp_spec" ] }, { @@ -520,7 +520,7 @@ " \"acquire_line\"\n", " ],\n", " }\n", - " return signal_map\n" + " return signal_map" ] }, { @@ -548,7 +548,7 @@ "exp_spec = res_spectroscopy_CW(freq_sweep, exp_settings)\n", "\n", "# set signal calibration and signal map for 
experiment to qubit 0\n", - "exp_spec.set_calibration(res_spec_calib(freq_sweep))\n" + "exp_spec.set_calibration(res_spec_calib(freq_sweep))" ] }, { @@ -562,7 +562,7 @@ "\n", "# run the experiment on the open instrument session\n", "compiled_res_spec = session.compile(exp_spec)\n", - "res_spec_results = session.run()\n" + "res_spec_results = session.run()" ] }, { @@ -597,7 +597,7 @@ "ax2.set_xlabel(\"Frequency (GHz)\")\n", "\n", "\n", - "plt.show()\n" + "plt.show()" ] }, { @@ -617,7 +617,7 @@ " plot=True,\n", ")\n", "opt_freq = p_opt[1]\n", - "print(f\"Resonant frequency: {opt_freq} GHz\")\n" + "print(f\"Resonant frequency: {opt_freq} GHz\")" ] }, { @@ -644,7 +644,7 @@ " # apply calibration to device setup\n", " device_setup.set_calibration(\n", " define_calibration(device_setup, qubit_parameters, lo_settings)\n", - " )\n" + " )" ] }, { @@ -704,7 +704,7 @@ " return readout_pulse\n", "\n", "\n", - "readout_pulse = create_readout_pulse(\"q0\")\n" + "readout_pulse = create_readout_pulse(\"q0\")" ] }, { @@ -748,7 +748,7 @@ " # holdoff time after signal acquisition - minimum 1us required for data processing on UHFQA\n", " exp_spec_pulsed.reserve(signal=\"measure\")\n", "\n", - " return exp_spec_pulsed\n" + " return exp_spec_pulsed" ] }, { @@ -766,7 +766,7 @@ "\n", "# set signal calibration and signal map for experiment to qubit 0\n", "exp_spec_pulsed.set_calibration(res_spec_calib(freq_sweep))\n", - "exp_spec_pulsed.set_signal_map(res_spec_map(\"q0\"))\n" + "exp_spec_pulsed.set_signal_map(res_spec_map(\"q0\"))" ] }, { @@ -781,7 +781,7 @@ "\n", "Path(\"Pulse_Sheets\").mkdir(parents=True, exist_ok=True)\n", "# generate a pulse sheet to inspect experiment befor runtime\n", - "show_pulse_sheet(\"Pulse_Sheets/Pulsed_Spectroscopy\", compiled_spec_pulsed)\n" + "show_pulse_sheet(\"Pulse_Sheets/Pulsed_Spectroscopy\", compiled_spec_pulsed)" ] }, { @@ -792,7 +792,7 @@ "outputs": [], "source": [ "# run the experiment on the open instrument session\n", - "spec_pulsed_results = 
session.run()\n" + "spec_pulsed_results = session.run()" ] }, { @@ -826,7 +826,7 @@ "ax2.plot(spec_freq / 1e9, np.unwrap(np.angle(spec_res)), \"orange\")\n", "ax1.set_ylabel(\"A (a.u.)\")\n", "ax2.set_ylabel(\"$\\\\phi$ (rad)\")\n", - "ax2.set_xlabel(\"Frequency (GHz)\")\n" + "ax2.set_xlabel(\"Frequency (GHz)\")" ] }, { @@ -855,7 +855,7 @@ " qubit_parameters[\"q0\"][\"ro_freq\"] = opt_freq - lo_settings[\"q0\"][\"shfqa_lo\"]\n", " device_setup.set_calibration(\n", " define_calibration(device_setup, qubit_parameters, lo_settings)\n", - " )\n" + " )" ] }, { @@ -883,7 +883,7 @@ "num_freq_points = 1001\n", "\n", "# set number of points for amplitude sweep\n", - "num_amp_points = 21\n" + "num_amp_points = 21" ] }, { @@ -936,7 +936,7 @@ " # holdoff time after signal acquisition - minimum 1us required for data processing on UHFQA\n", " exp_spec.delay(signal=\"measure\", time=1e-6)\n", "\n", - " return exp_spec\n" + " return exp_spec" ] }, { @@ -956,7 +956,7 @@ ")\n", "\n", "exp_spec_amp.set_calibration(res_spec_calib(freq_sweep))\n", - "exp_spec_amp.set_signal_map(res_spec_map(\"q0\"))\n" + "exp_spec_amp.set_signal_map(res_spec_map(\"q0\"))" ] }, { @@ -973,7 +973,7 @@ "# generate a pulse sheet to inspect experiment befor runtime\n", "show_pulse_sheet(\n", " \"Pulse_Sheets/Spectroscopy_vs_Amplitude_Pulse_Sheet\", compiled_spec_amp\n", - ")\n" + ")" ] }, { @@ -988,7 +988,7 @@ "timestamp = time.strftime(\"%Y%m%dT%H%M%S\")\n", "Path(\"Results\").mkdir(parents=True, exist_ok=True)\n", "session.save_results(f\"Results/{timestamp}_spec_amp_results.json\")\n", - "print(f\"File saved as Results/{timestamp}_spec_amp_results.json\")\n" + "print(f\"File saved as Results/{timestamp}_spec_amp_results.json\")" ] }, { @@ -1002,7 +1002,7 @@ " lo_settings[\"q0\"][\"shfqa_lo\"] + spec_amp_results.get_axis(\"res_spec_pulsed\")[1]\n", ")\n", "amp = spec_amp_results.get_axis(\"res_spec_pulsed\")[0]\n", - "data = spec_amp_results.get_data(\"res_spec_pulsed\")\n" + "data = 
spec_amp_results.get_data(\"res_spec_pulsed\")" ] }, { @@ -1049,7 +1049,7 @@ " ax[1].set_title(\"Phase\")\n", " ax[1].set_xlabel(\"Frequency [GHz]\")\n", " ax[1].set_ylabel(\"Normalized amplitude\")\n", - " fig.colorbar(CS2)\n" + " fig.colorbar(CS2)" ] }, { @@ -1077,7 +1077,7 @@ "\n", "# define number of averages\n", "# used for 2^num_averages, maximum: num_averages = 17\n", - "num_averages = 4\n" + "num_averages = 4" ] }, { @@ -1125,7 +1125,7 @@ " # holdoff time after signal acquisition - minimum 1us required for data processing on UHFQA\n", " exp_prop_delay.delay(signal=\"measure\", time=1e-6)\n", "\n", - " return exp_prop_delay\n" + " return exp_prop_delay" ] }, { @@ -1135,7 +1135,7 @@ "metadata": {}, "outputs": [], "source": [ - "short_readout_pulse = create_readout_pulse(\"q0\", length=600e-9, width=200e-9, sigma=0.2)\n" + "short_readout_pulse = create_readout_pulse(\"q0\", length=600e-9, width=200e-9, sigma=0.2)" ] }, { @@ -1151,7 +1151,7 @@ "\n", "exp_prop_delay = propagation_delay(short_readout_pulse, delay_sweep)\n", "\n", - "exp_prop_delay.set_signal_map(res_spec_map(\"q0\"))\n" + "exp_prop_delay.set_signal_map(res_spec_map(\"q0\"))" ] }, { @@ -1166,7 +1166,7 @@ "\n", "Path(\"Pulse_Sheets\").mkdir(parents=True, exist_ok=True)\n", "# generate a pulse sheet to inspect experiment befor runtime\n", - "show_pulse_sheet(\"Pulse_Sheets/Propagation_delay\", compiled_prop_delay)\n" + "show_pulse_sheet(\"Pulse_Sheets/Propagation_delay\", compiled_prop_delay)" ] }, { @@ -1181,7 +1181,7 @@ "timestamp = time.strftime(\"%Y%m%dT%H%M%S\")\n", "Path(\"Results\").mkdir(parents=True, exist_ok=True)\n", "session.save_results(f\"Results/{timestamp}_prop_delay_results.json\")\n", - "print(f\"File saved as Results/{timestamp}_prop_delay_results.json\")\n" + "print(f\"File saved as Results/{timestamp}_prop_delay_results.json\")" ] }, { @@ -1197,7 +1197,7 @@ "plt.plot(ax * 1e9, np.abs(dat))\n", "plt.xlabel(\"Integration delay [ns]\")\n", "plt.ylabel(\"Integration result\")\n", - 
"plt.show()\n" + "plt.show()" ] }, { @@ -1249,7 +1249,7 @@ " length=qubit_parameters[qubit][\"qb_len_spec\"],\n", " amplitude=qubit_parameters[qubit][\"qb_amp_spec\"],\n", " )\n", - " return pulse\n" + " return pulse" ] }, { @@ -1266,7 +1266,7 @@ "\n", "# define number of averages\n", "# used for 2^num_averages, maximum: num_averages = 17\n", - "num_averages = 10\n" + "num_averages = 10" ] }, { @@ -1330,7 +1330,7 @@ " # relax time after readout - for qubit relaxation to groundstate and signal processing\n", " exp_qspec.delay(signal=\"measure\", time=1e-6)\n", "\n", - " return exp_qspec\n" + " return exp_qspec" ] }, { @@ -1365,7 +1365,7 @@ " \"acquire_line\"\n", " ],\n", " }\n", - " return signal_map\n" + " return signal_map" ] }, { @@ -1399,7 +1399,7 @@ "\n", "# apply calibration and signal map for qubit 0\n", "exp_qspec.set_calibration(exp_calibration_q0)\n", - "exp_qspec.set_signal_map(signal_map_default(\"q0\"))\n" + "exp_qspec.set_signal_map(signal_map_default(\"q0\"))" ] }, { @@ -1416,7 +1416,7 @@ "# generate a pulse sheet to inspect experiment befor runtime\n", "show_pulse_sheet(\"Pulse_Sheets/Qubit_Spectroscopy\", compiled_qspec)\n", "\n", - "plot_simulation(compiled_qspec, 0, 100e-6)\n" + "plot_simulation(compiled_qspec, 0, 100e-6)" ] }, { @@ -1427,7 +1427,7 @@ "outputs": [], "source": [ "# run the experiment on qubit 0\n", - "qspec_results = session.run()\n" + "qspec_results = session.run()" ] }, { @@ -1459,7 +1459,7 @@ "plt.ylabel(\"A (a.u.)\")\n", "plt.xlabel(\"Frequency (GHz)\")\n", "\n", - "plt.show()\n" + "plt.show()" ] }, { @@ -1500,7 +1500,7 @@ " plt.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")\n", "\n", " qubit_parameters[\"q0\"][\"freq\"] = popt[1] - lo_settings[\"q0\"][\"shfsg_lo\"]\n", - " qubit_parameters[\"q0\"][\"freq\"]\n" + " qubit_parameters[\"q0\"][\"freq\"]" ] }, { @@ -1553,7 +1553,7 @@ " uid=f\"gaussian_drive_q{qubit}\",\n", " length=qubit_parameters[qubit][\"qb_len\"],\n", " amplitude=1,\n", - " )\n" + " )" ] }, { 
@@ -1566,7 +1566,7 @@ "# qubit drive frequency - defined in calibration on device setup as baseline reference\n", "lsg[\"q0\"][\"drive_line\"].oscillator.frequency = qubit_parameters[\"q0\"][\"freq\"]\n", "# set oscillator type to hardware to ensure optimal use of the instrument functionality\n", - "lsg[\"q0\"][\"drive_line\"].oscillator.modulation_type = ModulationType.HARDWARE\n" + "lsg[\"q0\"][\"drive_line\"].oscillator.modulation_type = ModulationType.HARDWARE" ] }, { @@ -1637,7 +1637,7 @@ " with exp_rabi.section(uid=\"delay\", length=1e-6):\n", " # relax time after readout - for qubit relaxation to groundstate and signal processing\n", " exp_rabi.reserve(signal=\"measure\")\n", - " return exp_rabi\n" + " return exp_rabi" ] }, { @@ -1672,7 +1672,7 @@ "exp_rabi.set_signal_map(signal_map_default(\"q0\"))\n", "\n", "# compile the experiment on the open instrument session\n", - "compiled_rabi = session.compile(exp_rabi)\n" + "compiled_rabi = session.compile(exp_rabi)" ] }, { @@ -1691,7 +1691,7 @@ "metadata": {}, "outputs": [], "source": [ - "show_pulse_sheet(\"Pulse_sheets/Amplitude_Rabi\", compiled_rabi)\n" + "show_pulse_sheet(\"Pulse_sheets/Amplitude_Rabi\", compiled_rabi)" ] }, { @@ -1711,7 +1711,7 @@ "outputs": [], "source": [ "# Simulate experiment\n", - "plot_simulation(compiled_rabi, 0, 100e-6)\n" + "plot_simulation(compiled_rabi, 0, 100e-6)" ] }, { @@ -1737,7 +1737,7 @@ "timestamp = time.strftime(\"%Y%m%dT%H%M%S\")\n", "Path(\"Results\").mkdir(parents=True, exist_ok=True)\n", "session.save_results(f\"Results/{timestamp}_rabi_results.json\")\n", - "print(f\"File saved as Results/{timestamp}_rabi_results.json\")\n" + "print(f\"File saved as Results/{timestamp}_rabi_results.json\")" ] }, { @@ -1755,7 +1755,7 @@ "\n", "if emulate:\n", " # create some dummy data if running in emulation mode\n", - " rabi_res = func_osc(rabi_amp, 10, 0, 1, 1.2) + 0.2 * np.random.rand(len(rabi_amp))\n" + " rabi_res = func_osc(rabi_amp, 10, 0, 1, 1.2) + 0.2 * 
np.random.rand(len(rabi_amp))" ] }, { @@ -1779,7 +1779,7 @@ "print(popt)\n", "\n", "# plot fit results together with measurement data\n", - "plt.plot(amp_plot, func_osc(amp_plot, *popt), \"-r\")\n" + "plt.plot(amp_plot, func_osc(amp_plot, *popt), \"-r\")" ] }, { @@ -1833,7 +1833,7 @@ " uid=f\"gaussian_drive_q{qubit}\",\n", " length=qubit_parameters[qubit][\"qb_len\"],\n", " amplitude=0.5,\n", - " )\n" + " )" ] }, { @@ -1898,7 +1898,7 @@ " with exp_ramsey.section(uid=\"delay\", length=1e-6):\n", " # relax time after readout - for qubit relaxation to groundstate and signal processing\n", " exp_ramsey.reserve(signal=\"measure\")\n", - " return exp_ramsey\n" + " return exp_ramsey" ] }, { @@ -1929,7 +1929,7 @@ "\n", "ramsey_exp.set_signal_map(signal_map_default(\"q0\"))\n", "\n", - "compiled_ramsey = session.compile(ramsey_exp)\n" + "compiled_ramsey = session.compile(ramsey_exp)" ] }, { @@ -1948,7 +1948,7 @@ "metadata": {}, "outputs": [], "source": [ - "show_pulse_sheet(\"Pulse_sheets/Ramsey\", compiled_ramsey)\n" + "show_pulse_sheet(\"Pulse_sheets/Ramsey\", compiled_ramsey)" ] }, { @@ -1967,7 +1967,7 @@ "metadata": {}, "outputs": [], "source": [ - "plot_simulation(compiled_ramsey, 0e-6, 100e-6, plot_width=10)\n" + "plot_simulation(compiled_ramsey, 0e-6, 100e-6, plot_width=10)" ] }, { @@ -1991,7 +1991,7 @@ "timestamp = time.strftime(\"%Y%m%dT%H%M%S\")\n", "Path(\"Results\").mkdir(parents=True, exist_ok=True)\n", "session.save_results(f\"Results/{timestamp}_ramsey_results.json\")\n", - "print(f\"File saved as Results/{timestamp}_ramsey_results.json\")\n" + "print(f\"File saved as Results/{timestamp}_ramsey_results.json\")" ] }, { @@ -2011,7 +2011,7 @@ " # create dummy data if running in emulation mode\n", " ramsey_res = func_decayOsc(\n", " ramsey_delay, 1e6, 0, 1 / 10e-6, amp=0.5, off=0.5\n", - " ) + 0.12 * np.random.rand(len(ramsey_delay))\n" + " ) + 0.12 * np.random.rand(len(ramsey_delay))" ] }, { @@ -2048,7 +2048,7 @@ "print(popt)\n", "\n", "# plot fit results 
together with experimental data\n", - "plt.plot(delay_plot, func_decayOsc(delay_plot, *popt), \"-r\")\n" + "plt.plot(delay_plot, func_decayOsc(delay_plot, *popt), \"-r\")" ] }, { diff --git a/examples/03_superconducting_qubits/01_single_qubit_tuneup_uhfqa_hdawg.ipynb b/examples/03_superconducting_qubits/01_single_qubit_tuneup_uhfqa_hdawg.ipynb index 051d862..de33f9b 100644 --- a/examples/03_superconducting_qubits/01_single_qubit_tuneup_uhfqa_hdawg.ipynb +++ b/examples/03_superconducting_qubits/01_single_qubit_tuneup_uhfqa_hdawg.ipynb @@ -8,7 +8,7 @@ "tags": [] }, "source": [ - "# Single Qubit Tuneup\n", + "# Single Qubit Tuneup with UHFQA and HDAWG\n", "\n", "In this notebook we demonstrate single qubit tuneup with the LabOne Q software, implemented as a sequence of experiments. \n", "\n", @@ -67,7 +67,7 @@ "from laboneq.contrib.example_helpers.plotting.plot_helpers import plot_simulation\n", "\n", "import matplotlib.pyplot as plt\n", - "import numpy as np\n" + "import numpy as np" ] }, { @@ -135,7 +135,7 @@ " device_pqsc:\n", " - to: device_hdawg\n", " port: ZSYNCS/0\n", - "\"\"\"\n" + "\"\"\"" ] }, { @@ -194,7 +194,7 @@ "lo_settings = {\n", " \"qb_lo\": 5.3e9, # qubit LO frequency in [Hz]\n", " \"ro_lo\": 6.4e9, # readout LO frequency in [Hz]\n", - "}\n" + "}" ] }, { @@ -304,7 +304,7 @@ " port_delay=parameters[\"ro_delay\"] + parameters[\"ro_int_delay\"],\n", " )\n", "\n", - " return my_calibration\n" + " return my_calibration" ] }, { @@ -364,7 +364,7 @@ "lsg_q1 = my_setup.logical_signal_groups[\"q1\"].logical_signals\n", "drive_Oscillator_q1 = lsg_q1[\"drive_line\"].oscillator\n", "readout_Oscillator_q1 = lsg_q1[\"measure_line\"].oscillator\n", - "acquire_Oscillator_q1 = lsg_q1[\"acquire_line\"].oscillator\n" + "acquire_Oscillator_q1 = lsg_q1[\"acquire_line\"].oscillator" ] }, { @@ -390,7 +390,7 @@ "\n", "# create and connect to a session\n", "my_session = Session(device_setup=my_setup)\n", - "my_session.connect(do_emulation=emulate)\n" + 
"my_session.connect(do_emulation=emulate)" ] }, { @@ -466,7 +466,7 @@ "# spectroscopy excitation pulse\n", "readout_pulse_spec = pulse_library.const(\n", " length=qubit_parameters[\"ro_len_spec\"], amplitude=qubit_parameters[\"ro_amp_spec\"]\n", - ")\n" + ")" ] }, { @@ -531,7 +531,7 @@ " # holdoff time after signal acquisition - minimum 1us required for data processing on UHFQA\n", " exp_spec.delay(signal=\"measure\", time=1e-6)\n", "\n", - " return exp_spec\n" + " return exp_spec" ] }, { @@ -562,7 +562,7 @@ "res_spec_map_q1 = {\n", " \"measure\": \"/logical_signal_groups/q1/measure_line\",\n", " \"acquire\": \"/logical_signal_groups/q1/acquire_line\",\n", - "}\n" + "}" ] }, { @@ -605,7 +605,7 @@ "exp_spec.set_signal_map(res_spec_map_q0)\n", "\n", "# run the experiment on the open instrument session\n", - "my_results = my_session.run(exp_spec)\n" + "my_results = my_session.run(exp_spec)" ] }, { @@ -630,7 +630,7 @@ " + lo_settings[\"ro_lo\"],\n", " 1e7,\n", " 10,\n", - " ) + 0.2 * np.random.rand(len(spec_freq))\n" + " ) + 0.2 * np.random.rand(len(spec_freq))" ] }, { @@ -665,7 +665,7 @@ "print(popt)\n", "\n", "# plot the fit results together with the measurement data\n", - "ax1.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")\n" + "ax1.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")" ] }, { @@ -677,7 +677,7 @@ "source": [ "# update qubit parameter dictionary with results from data fitting\n", "qubit_parameters[\"ro_freq_q0\"] = popt[1] - lo_settings[\"ro_lo\"]\n", - "qubit_parameters[\"ro_freq_q0\"]\n" + "qubit_parameters[\"ro_freq_q0\"]" ] }, { @@ -707,7 +707,7 @@ "exp_spec.set_signal_map(res_spec_map_q1)\n", "\n", "# run the experiment on the open instrument session\n", - "my_results = my_session.run(exp_spec)\n" + "my_results = my_session.run(exp_spec)" ] }, { @@ -732,7 +732,7 @@ " + lo_settings[\"ro_lo\"],\n", " 1e7,\n", " 10,\n", - " ) + 0.2 * np.random.rand(len(spec_freq))\n" + " ) + 0.2 * np.random.rand(len(spec_freq))" ] }, { 
@@ -767,7 +767,7 @@ "print(popt)\n", "\n", "# plot the fit results together with the measurement data\n", - "ax1.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")\n" + "ax1.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")" ] }, { @@ -779,7 +779,7 @@ "source": [ "# update qubit parameter dictionary with results from data fitting\n", "qubit_parameters[\"ro_freq_q1\"] = popt[1] - lo_settings[\"ro_lo\"]\n", - "qubit_parameters[\"ro_freq_q1\"]\n" + "qubit_parameters[\"ro_freq_q1\"]" ] }, { @@ -854,7 +854,7 @@ "# integration weights for qubit measurement - here simple constant weights, i.e. all parts of the return signal are weighted equally\n", "readout_weighting_function = pulse_library.const(\n", " uid=\"readout_weighting_function\", length=qubit_parameters[\"ro_len\"], amplitude=1.0\n", - ")\n" + ")" ] }, { @@ -908,7 +908,7 @@ " exp.delay(signal=readout_id, time=relax_time)\n", " # make sure that the drive line is reserved also for the relax time, if selected\n", " if reserve_id is not None:\n", - " exp.reserve(signal=reserve_id)\n" + " exp.reserve(signal=reserve_id)" ] }, { @@ -945,7 +945,7 @@ "readout_Oscillator_q1.frequency = qubit_parameters[\"ro_freq_q1\"]\n", "readout_Oscillator_q1.modulation_type = ModulationType.SOFTWARE\n", "acquire_Oscillator_q1.frequency = qubit_parameters[\"ro_freq_q1\"]\n", - "acquire_Oscillator_q1.modulation_type = ModulationType.SOFTWARE\n" + "acquire_Oscillator_q1.modulation_type = ModulationType.SOFTWARE" ] }, { @@ -1012,7 +1012,7 @@ " relax_time=qubit_parameters[\"relax\"],\n", " )\n", "\n", - " return exp_qspec\n" + " return exp_qspec" ] }, { @@ -1050,7 +1050,7 @@ " \"drive\": \"/logical_signal_groups/q1/drive_line\",\n", " \"measure\": \"/logical_signal_groups/q1/measure_line\",\n", " \"acquire\": \"/logical_signal_groups/q1/acquire_line\",\n", - "}\n" + "}" ] }, { @@ -1088,7 +1088,7 @@ "exp_qspec.set_signal_map(q0_map)\n", "\n", "# run the experiment on qubit 0\n", - "my_results = 
my_session.run(exp_qspec)\n" + "my_results = my_session.run(exp_qspec)" ] }, { @@ -1099,7 +1099,7 @@ "outputs": [], "source": [ "# Plot simulated output signals\n", - "plot_simulation(my_session.compiled_experiment, 0, 40e-6)\n" + "plot_simulation(my_session.compiled_experiment, 0, 40e-6)" ] }, { @@ -1124,7 +1124,7 @@ " + lo_settings[\"qb_lo\"],\n", " 2e6,\n", " 1,\n", - " ) + 0.1 * np.random.rand(len(qspec_freq))\n" + " ) + 0.1 * np.random.rand(len(qspec_freq))" ] }, { @@ -1157,7 +1157,7 @@ "print(popt)\n", "\n", "# plot fit results together with measurement data\n", - "plt.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")\n" + "plt.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")" ] }, { @@ -1169,7 +1169,7 @@ "source": [ "# update qubit parameters\n", "qubit_parameters[\"qb0_freq\"] = popt[1] - lo_settings[\"qb_lo\"]\n", - "qubit_parameters[\"qb0_freq\"]\n" + "qubit_parameters[\"qb0_freq\"]" ] }, { @@ -1196,7 +1196,7 @@ "exp_qspec.set_signal_map(q1_map)\n", "\n", "# run the experiment on qubit 1\n", - "my_results = my_session.run(exp_qspec)\n" + "my_results = my_session.run(exp_qspec)" ] }, { @@ -1221,7 +1221,7 @@ " + lo_settings[\"qb_lo\"],\n", " 2e6,\n", " 1,\n", - " ) + 0.1 * np.random.rand(len(qspec_freq))\n" + " ) + 0.1 * np.random.rand(len(qspec_freq))" ] }, { @@ -1254,7 +1254,7 @@ "print(popt)\n", "\n", "# plot fit results together with measurement data\n", - "plt.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")\n" + "plt.plot(freq_plot / 1e9, func_invLorentz(freq_plot, *popt), \"-r\")" ] }, { @@ -1266,7 +1266,7 @@ "source": [ "# update qubit parameters\n", "qubit_parameters[\"qb1_freq\"] = popt[1] - lo_settings[\"qb_lo\"]\n", - "qubit_parameters[\"qb1_freq\"]\n" + "qubit_parameters[\"qb1_freq\"]" ] }, { @@ -1321,7 +1321,7 @@ "# Rabi excitation pulse - gaussian of unit amplitude - amplitude will be scaled with sweep parameter in experiment\n", "gaussian_pulse = pulse_library.gaussian(\n", " 
uid=\"gaussian_pulse\", length=qubit_parameters[\"qb_len\"], amplitude=1.0\n", - ")\n" + ")" ] }, { @@ -1351,7 +1351,7 @@ "drive_Oscillator_q0.modulation_type = ModulationType.HARDWARE\n", "\n", "drive_Oscillator_q1.frequency = qubit_parameters[\"qb1_freq\"]\n", - "drive_Oscillator_q1.modulation_type = ModulationType.HARDWARE\n" + "drive_Oscillator_q1.modulation_type = ModulationType.HARDWARE" ] }, { @@ -1416,7 +1416,7 @@ " readout_pulse=readout_pulse,\n", " readout_weights=readout_weighting_function,\n", " relax_time=qubit_parameters[\"relax\"],\n", - " )\n" + " )" ] }, { @@ -1437,7 +1437,7 @@ " \"drive\": lsg_q1[\"drive_line\"],\n", " \"measure\": lsg_q1[\"measure_line\"],\n", " \"acquire\": lsg_q1[\"acquire_line\"],\n", - "}\n" + "}" ] }, { @@ -1475,7 +1475,7 @@ "exp_rabi.set_signal_map(q0_map)\n", "\n", "# run the experiment on qubit 0\n", - "my_results = my_session.run(exp_rabi)\n" + "my_results = my_session.run(exp_rabi)" ] }, { @@ -1486,7 +1486,7 @@ "outputs": [], "source": [ "# Plot simulated output signals\n", - "plot_simulation(my_session.compiled_experiment, 0, 40e-6)\n" + "plot_simulation(my_session.compiled_experiment, 0, 40e-6)" ] }, { @@ -1512,7 +1512,7 @@ "exp_rabi.set_signal_map(q1_map)\n", "\n", "# run the experiment on qubit 1\n", - "my_results = my_session.run(exp_rabi)\n" + "my_results = my_session.run(exp_rabi)" ] }, { @@ -1539,7 +1539,7 @@ "\n", "if emulate:\n", " # create some dummy data if running in emulation mode\n", - " rabi_res = func_osc(rabi_amp, 10, 0, 1, 1.2) + 0.2 * np.random.rand(len(rabi_amp))\n" + " rabi_res = func_osc(rabi_amp, 10, 0, 1, 1.2) + 0.2 * np.random.rand(len(rabi_amp))" ] }, { @@ -1563,7 +1563,7 @@ "print(popt)\n", "\n", "# plot fit results together with measurement data\n", - "plt.plot(amp_plot, func_osc(amp_plot, *popt), \"-r\")\n" + "plt.plot(amp_plot, func_osc(amp_plot, *popt), \"-r\")" ] }, { @@ -1579,7 +1579,7 @@ "print(qubit_parameters[\"pi_amp\"])\n", "\n", "qubit_parameters[\"pihalf_amp\"] = np.pi / 2 / 
(popt[0])\n", - "qubit_parameters[\"pihalf_amp\"]\n" + "qubit_parameters[\"pihalf_amp\"]" ] }, { @@ -1630,7 +1630,7 @@ "# T1 excitation pulse - qubit pi pulse\n", "x180 = pulse_library.gaussian(\n", " uid=\"x180\", length=qubit_parameters[\"qb_len\"], amplitude=qubit_parameters[\"pi_amp\"]\n", - ")\n" + ")" ] }, { @@ -1700,7 +1700,7 @@ " readout_pulse=readout_pulse,\n", " readout_weights=readout_weighting_function,\n", " relax_time=qubit_parameters[\"relax\"],\n", - " )\n" + " )" ] }, { @@ -1736,7 +1736,7 @@ "exp_t1.set_signal_map(q0_map)\n", "\n", "# run the experiment on qubit 0\n", - "my_results = my_session.run(exp_t1)\n" + "my_results = my_session.run(exp_t1)" ] }, { @@ -1747,7 +1747,7 @@ "outputs": [], "source": [ "# Plot simulated output signals\n", - "plot_simulation(my_session.compiled_experiment, 50e-6, 1e-6)\n" + "plot_simulation(my_session.compiled_experiment, 50e-6, 1e-6)" ] }, { @@ -1772,7 +1772,7 @@ "exp_t1.set_signal_map(q1_map)\n", "\n", "# run the experiment on qubit 1\n", - "my_results = my_session.run(exp_t1)\n" + "my_results = my_session.run(exp_t1)" ] }, { @@ -1803,7 +1803,7 @@ " # create dummy data if running in emulation mode\n", " t1_res = func_exp(\n", " t1_delay, 2 / qubit_parameters[\"T1\"], 0.1, 1\n", - " ) + 0.1 * np.random.rand(len(t1_delay))\n" + " ) + 0.1 * np.random.rand(len(t1_delay))" ] }, { @@ -1827,7 +1827,7 @@ "print(popt)\n", "\n", "# plot fit results together with measurement data\n", - "plt.plot(delay_plot, func_exp(delay_plot, *popt), \"-r\")\n" + "plt.plot(delay_plot, func_exp(delay_plot, *popt), \"-r\")" ] }, { @@ -1839,7 +1839,7 @@ "source": [ "# update qubit parameters - here relaxation time / qubit lifetime\n", "qubit_parameters[\"T1\"] = 1 / popt[0]\n", - "qubit_parameters[\"T1\"]\n" + "qubit_parameters[\"T1\"]" ] }, { @@ -1891,7 +1891,7 @@ " uid=\"x90\",\n", " length=qubit_parameters[\"qb_len\"],\n", " amplitude=qubit_parameters[\"pihalf_amp\"],\n", - ")\n" + ")" ] }, { @@ -1959,7 +1959,7 @@ " 
readout_pulse=readout_pulse,\n", " readout_weights=readout_weighting_function,\n", " relax_time=qubit_parameters[\"relax\"],\n", - " )\n" + " )" ] }, { @@ -1985,7 +1985,7 @@ " frequency=qubit_parameters[\"qb1_freq\"] + 2 * qubit_parameters[\"ramsey_det\"],\n", " modulation_type=ModulationType.HARDWARE,\n", " )\n", - ")\n" + ")" ] }, { @@ -2018,7 +2018,7 @@ "exp_ramsey.set_signal_map(q0_map)\n", "\n", "# run experiment on qubit 0\n", - "my_results = my_session.run(exp_ramsey)\n" + "my_results = my_session.run(exp_ramsey)" ] }, { @@ -2029,7 +2029,7 @@ "outputs": [], "source": [ "# Plot simulated output signals\n", - "plot_simulation(my_session.compiled_experiment, 26e-6, 2e-6)\n" + "plot_simulation(my_session.compiled_experiment, 26e-6, 2e-6)" ] }, { @@ -2053,7 +2053,7 @@ "exp_ramsey.set_signal_map(q1_map)\n", "\n", "# run experiment on qubit 1\n", - "my_results = my_session.run(exp_ramsey)\n" + "my_results = my_session.run(exp_ramsey)" ] }, { @@ -2087,7 +2087,7 @@ " 1 / qubit_parameters[\"T2_ramsey\"],\n", " amp=0.5,\n", " off=0.5,\n", - " ) + 0.12 * np.random.rand(len(ramsey_delay))\n" + " ) + 0.12 * np.random.rand(len(ramsey_delay))" ] }, { @@ -2124,7 +2124,7 @@ "print(popt)\n", "\n", "# plot fit results together with experimental data\n", - "plt.plot(delay_plot, func_decayOsc(delay_plot, *popt), \"-r\")\n" + "plt.plot(delay_plot, func_decayOsc(delay_plot, *popt), \"-r\")" ] }, { @@ -2136,7 +2136,7 @@ "source": [ "# update qubit parameters - here qubit dephasing time\n", "qubit_parameters[\"T2_ramsey\"] = 1 / popt[2]\n", - "qubit_parameters[\"T2_ramsey\"]\n" + "qubit_parameters[\"T2_ramsey\"]" ] }, { @@ -2191,7 +2191,7 @@ ")\n", "x180 = pulse_library.gaussian(\n", " uid=\"x180\", length=qubit_parameters[\"qb_len\"], amplitude=qubit_parameters[\"pi_amp\"]\n", - ")\n" + ")" ] }, { @@ -2261,7 +2261,7 @@ " readout_pulse=readout_pulse,\n", " readout_weights=readout_weighting_function,\n", " relax_time=qubit_parameters[\"relax\"],\n", - " )\n" + " )" ] }, { @@ -2296,7 
+2296,7 @@ "exp_echo.set_signal_map(q0_map)\n", "\n", "# run experiment on qubit 0\n", - "my_results = my_session.run(exp_echo)\n" + "my_results = my_session.run(exp_echo)" ] }, { @@ -2307,7 +2307,7 @@ "outputs": [], "source": [ "# Plot simulated output signals\n", - "plot_simulation(my_session.compiled_experiment, 360e-6, 5e-6)\n" + "plot_simulation(my_session.compiled_experiment, 360e-6, 5e-6)" ] }, { @@ -2333,7 +2333,7 @@ "exp_echo.set_signal_map(q1_map)\n", "\n", "# run experiment on qubit 1\n", - "my_results = my_session.run(exp_echo)\n" + "my_results = my_session.run(exp_echo)" ] }, { @@ -2362,7 +2362,7 @@ " # create dummy data if running in emulation mode\n", " echo_res = func_exp(\n", " echo_delay, 2 / qubit_parameters[\"T2_echo\"], 0.1, -1\n", - " ) + 0.1 * np.random.rand(len(echo_delay))\n" + " ) + 0.1 * np.random.rand(len(echo_delay))" ] }, { @@ -2386,7 +2386,7 @@ "print(popt)\n", "\n", "# plot fit results together with experimental data\n", - "plt.plot(delay_plot, func_exp(delay_plot, *popt), \"-r\")\n" + "plt.plot(delay_plot, func_exp(delay_plot, *popt), \"-r\")" ] }, { @@ -2398,7 +2398,7 @@ "source": [ "# update qubit parameters - here qubit dephasing time\n", "qubit_parameters[\"T2_echo\"] = 1 / popt[0]\n", - "qubit_parameters[\"T2_echo\"]\n" + "qubit_parameters[\"T2_echo\"]" ] }, { diff --git a/examples/03_superconducting_qubits/02_two_qubit_experiments_uhfqa_hdawg.ipynb b/examples/03_superconducting_qubits/02_two_qubit_experiments_uhfqa_hdawg.ipynb index 9183ebe..e9731e9 100644 --- a/examples/03_superconducting_qubits/02_two_qubit_experiments_uhfqa_hdawg.ipynb +++ b/examples/03_superconducting_qubits/02_two_qubit_experiments_uhfqa_hdawg.ipynb @@ -6,7 +6,7 @@ "id": "0ed599f8-b026-4714-8243-4df4bc485ea6", "metadata": {}, "source": [ - "# Two qubit experiments \n", + "# Two qubit experiments with UHFQA and HDAWG\n", "\n", "In this reference notebook we show how to define basic two qubit tuneup experiments - simultaneous Rabi and simultaneous Ramsey" ] 
@@ -48,7 +48,7 @@ "from laboneq.contrib.example_helpers.plotting.plot_helpers import (\n", " plot_result_2d,\n", " plot_simulation,\n", - ")\n" + ")" ] }, { @@ -116,7 +116,7 @@ " device_pqsc:\n", " - to: device_hdawg\n", " port: ZSYNCS/0\n", - "\"\"\"\n" + "\"\"\"" ] }, { @@ -249,7 +249,7 @@ " # delays the start of integration in relation to the start of the readout pulse to compensate for signal propagation time\n", " port_delay=10e-9,\n", " delay_signal=0,\n", - " )\n" + " )" ] }, { @@ -286,7 +286,7 @@ "\n", "\n", "# create device setup\n", - "device_setup = create_device_setup()\n" + "device_setup = create_device_setup()" ] }, { @@ -297,7 +297,7 @@ "outputs": [], "source": [ "# use emulation mode - change, if running on hardware\n", - "use_emulation = True\n" + "use_emulation = True" ] }, { @@ -360,7 +360,7 @@ "lsg[\"measure_line\"].oscillator.modulation_type = ModulationType.SOFTWARE\n", "lsg[\"acquire_line\"].calibration.port_delay = 20e-9\n", "lsg[\"acquire_line\"].calibration.oscillator.frequency = 50e6\n", - "lsg[\"acquire_line\"].oscillator.modulation_type = ModulationType.SOFTWARE\n" + "lsg[\"acquire_line\"].oscillator.modulation_type = ModulationType.SOFTWARE" ] }, { @@ -398,7 +398,7 @@ " )\n", " # relax time after readout - for signal processing and qubit relaxation to groundstate\n", " with exp.section():\n", - " exp.delay(signal=measure_id, time=1e-6)\n" + " exp.delay(signal=measure_id, time=1e-6)" ] }, { @@ -470,7 +470,7 @@ " acquire_id=\"acquire_q1\",\n", " acquire_handle=\"q1\",\n", " sweep_parameter=sweep_parameter_q1,\n", - " )\n" + " )" ] }, { @@ -496,7 +496,7 @@ " \"acquire_q1\": device_setup.logical_signal_groups[\"q1\"].logical_signals[\n", " \"acquire_line\"\n", " ],\n", - "}\n" + "}" ] }, { @@ -523,7 +523,7 @@ "session.connect(do_emulation=use_emulation)\n", "\n", "# run experiment on both qubit 0 and qubit 1\n", - "my_results = session.run(exp)\n" + "my_results = session.run(exp)" ] }, { @@ -539,7 +539,7 @@ "outputs": [], "source": [ "# 
Plot simulated output signals\n", - "plot_simulation(session.compiled_experiment, 0, 10e-6)\n" + "plot_simulation(session.compiled_experiment, 0, 10e-6)" ] }, { @@ -550,7 +550,7 @@ "outputs": [], "source": [ "# plot measurement results - qubit 0\n", - "plot_result_2d(my_results, \"q0\", mult_axis=0)\n" + "plot_result_2d(my_results, \"q0\", mult_axis=0)" ] }, { @@ -561,7 +561,7 @@ "outputs": [], "source": [ "# plot measurement results - qubit 1\n", - "plot_result_2d(my_results, \"q1\", mult_axis=1)\n" + "plot_result_2d(my_results, \"q1\", mult_axis=1)" ] }, { @@ -573,7 +573,7 @@ "source": [ "# use pulse sheet viewer to display the pulse sequence - only recommended for small number of averages and sweep steps to avoid performance issues\n", "compiled_exp = session.compiled_experiment\n", - "show_pulse_sheet(\"Amplitude Rabi for two\", compiled_exp)\n" + "show_pulse_sheet(\"Amplitude Rabi for two\", compiled_exp)" ] }, { @@ -618,7 +618,7 @@ "# readout integration weights\n", "readout_weighting_function = pulse_library.const(\n", " uid=\"readout_weighting_function\", length=400e-9, amplitude=1.0\n", - ")\n" + ")" ] }, { @@ -659,7 +659,7 @@ " )\n", " # relax time after readout - for signal processing and qubit relaxation to groundstate\n", " with exp.section():\n", - " exp.delay(signal=measure_id, time=1e-6)\n" + " exp.delay(signal=measure_id, time=1e-6)" ] }, { @@ -731,7 +731,7 @@ " sweep_parameter=sweep_parameter,\n", " excitation_length=drive_length,\n", " excitation_pulse=x90_q1,\n", - " )\n" + " )" ] }, { @@ -757,7 +757,7 @@ " \"acquire_q1\": device_setup.logical_signal_groups[\"q1\"].logical_signals[\n", " \"acquire_line\"\n", " ],\n", - "}\n" + "}" ] }, { @@ -784,7 +784,7 @@ "session.connect(do_emulation=use_emulation)\n", "\n", "# run on both qubits simultaneously\n", - "my_results = session.run(exp)\n" + "my_results = session.run(exp)" ] }, { @@ -800,7 +800,7 @@ "outputs": [], "source": [ "# Plot simulated output signals\n", - 
"plot_simulation(session.compiled_experiment, 0, 10e-6)\n" + "plot_simulation(session.compiled_experiment, 0, 10e-6)" ] }, { @@ -811,7 +811,7 @@ "outputs": [], "source": [ "# plot measurement results - qubit 0\n", - "plot_result_2d(my_results, \"q0\")\n" + "plot_result_2d(my_results, \"q0\")" ] }, { @@ -822,7 +822,7 @@ "outputs": [], "source": [ "# plot measurement results - qubit 1\n", - "plot_result_2d(my_results, \"q1\")\n" + "plot_result_2d(my_results, \"q1\")" ] }, { @@ -834,7 +834,7 @@ "source": [ "# use pulse sheet viewer to display the pulse sequence - only recommended for small number of averages and sweep steps to avoid performance issues\n", "compiled_exp = session.compiled_experiment\n", - "show_pulse_sheet(\"Ramsey for two\", compiled_exp)\n" + "show_pulse_sheet(\"Ramsey for two\", compiled_exp)" ] }, { diff --git a/examples/03_superconducting_qubits/03_qubit_tuneup_shfqc_ext_dc_source.ipynb b/examples/03_superconducting_qubits/03_qubit_tuneup_shfqc_ext_dc_source.ipynb index 5e1d0fd..68c581a 100644 --- a/examples/03_superconducting_qubits/03_qubit_tuneup_shfqc_ext_dc_source.ipynb +++ b/examples/03_superconducting_qubits/03_qubit_tuneup_shfqc_ext_dc_source.ipynb @@ -8,7 +8,7 @@ "tags": [] }, "source": [ - "# DC-Biased 2D Resonator and Qubit Spectroscopy with LabOne Q Software - SHF Instruments\n", + "# DC-Biased 2D Resonator and Qubit Spectroscopy with SHF Instruments\n", "\n", "In this notebook we demonstrate resonator and qubit spectroscopy experiments showing how an external DC source can be incoportated using the `call` function with the LabOne Q software. 
\n", "\n", diff --git a/examples/03_superconducting_qubits/04_parallel_qubit_tuneup_shfqc_hdawg_pqsc.ipynb b/examples/03_superconducting_qubits/04_parallel_qubit_tuneup_shfqc_hdawg_pqsc.ipynb new file mode 100644 index 0000000..a6f017c --- /dev/null +++ b/examples/03_superconducting_qubits/04_parallel_qubit_tuneup_shfqc_hdawg_pqsc.ipynb @@ -0,0 +1,1463 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "id": "fe2d00d7-c88e-4c5c-a2ca-8d6bc78ebe3e", + "metadata": { + "tags": [] + }, + "source": [ + "# Superconducting Qubit Tune-up with LabOne Q for SHF Instruments and many qubits in parallel\n", + "\n", + "In this notebook we demonstrate qubit tuneup with LabOne Q for many qubits in parallel, implemented as a sequence of experiments. \n", + "\n", + "Before starting the experiments, we define a set of initial qubit parameters, as might be known from fabrication. \n", + "\n", + "These parameters can then be used to update the baseline calibration used in the experiments and defined as part of the device setup." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "4d4e7d0b-b53a-40e4-831c-236ed9d97c42", + "metadata": {}, + "source": [ + "# 0. 
General Imports and Definitions" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "f3ecf2ff-f304-472a-b6d2-a50792a39a0d", + "metadata": { + "tags": [] + }, + "source": [ + "## 0.1 Python Imports " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f383ab20", + "metadata": { + "ExecuteTime": { + "end_time": "2021-09-20T12:02:48.093782Z", + "start_time": "2021-09-20T12:02:46.601892Z" + } + }, + "outputs": [], + "source": [ + "# convenience Import for all LabOne Q Functionality\n", + "from laboneq.simple import *\n", + "\n", + "# plotting functionality\n", + "from laboneq.contrib.example_helpers.plotting.plot_helpers import (\n", + " plot_simulation,\n", + " plot_results,\n", + ")\n", + "\n", + "# base qubit parameters and function to traslate into Transmon class\n", + "from laboneq.contrib.example_helpers.example_notebook_helper import (\n", + " create_transmon,\n", + " generate_base_transmon_parameters,\n", + ")\n", + "from laboneq.contrib.example_helpers.generate_descriptor import generate_descriptor\n", + "\n", + "# for saving results and pulse sheets\n", + "import datetime\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3afea916", + "metadata": {}, + "outputs": [], + "source": [ + "# set up connection to database\n", + "my_setup_db = DataStore(\"laboneq_data/setup_database.db\")\n", + "\n", + "my_results_db = DataStore(\"laboneq_data/results_database.db\")\n", + "\n", + "# check if data is already stored in database\n", + "for key in my_setup_db.keys():\n", + " print(key)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "0d9141f7-3fc4-4e0e-b553-59ea490e84cc", + "metadata": { + "tags": [] + }, + "source": [ + "# 1. 
Define the Instrument Setup - auto-define depending on number of qubits used" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ff92991b", + "metadata": {}, + "outputs": [], + "source": [ + "number_of_qubits = 24" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fa32aedf", + "metadata": {}, + "outputs": [], + "source": [ + "device_setup_descriptor = generate_descriptor(\n", + " pqsc=[\"dev10001\"],\n", + " hdawg_8=[\n", + " \"dev800\" + str(it) for it in range(1, int(np.ceil(number_of_qubits / 8)) + 1)\n", + " ],\n", + " shfqc_6=[\n", + " \"dev1200\" + str(it) for it in range(1, int(np.ceil(number_of_qubits / 6)) + 1)\n", + " ],\n", + " multiplex=True,\n", + " number_multiplex=6,\n", + " number_data_qubits=number_of_qubits,\n", + " number_flux_lines=number_of_qubits,\n", + ")\n", + "\n", + "device_setup = DeviceSetup.from_descriptor(\n", + " yaml_text=device_setup_descriptor,\n", + " server_host=\"localhost\", # ip address of the LabOne dataserver used to communicate with the instruments\n", + " server_port=\"8004\", # port number of the dataserver - default is 8004\n", + " setup_name=\"my_\" + str(number_of_qubits) + \"qubit_setup\", # setup name\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "f147afc0-dcde-4475-a96c-2a5e53e5176e", + "metadata": { + "tags": [] + }, + "source": [ + "# 2. 
Create Qubits, Apply Calibration Data, Connect to the Instruments" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "9815c045", + "metadata": {}, + "source": [ + "## 2.1 Create Qubits, use them to generate and set calibration to the device setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dd9da9dd", + "metadata": {}, + "outputs": [], + "source": [ + "# import / generate base parameter set for all qubits\n", + "base_qubit_parameters = generate_base_transmon_parameters(number_of_qubits)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "450e85d8", + "metadata": {}, + "outputs": [], + "source": [ + "# create qubits from base parameters\n", + "my_qubits = [\n", + " create_transmon(\n", + " it, base_parameters=base_qubit_parameters, device_setup=device_setup\n", + " )\n", + " for it in range(number_of_qubits)\n", + "]\n", + "\n", + "for qubit in my_qubits:\n", + " device_setup.set_calibration(qubit.calibration(set_local_oscillators=True))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39b53d50", + "metadata": {}, + "outputs": [], + "source": [ + "# save device setup and calibration to database\n", + "\n", + "my_setup_db.store(\n", + " data=device_setup,\n", + " key=\"device_setup\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")\n", + "\n", + "my_setup_db.store(\n", + " data=device_setup.get_calibration(),\n", + " key=\"device_setup_calibration\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "62ae58f8-4016-43e2-8c33-ee88645c7268", + "metadata": {}, + "source": [ + "## 2.2 Create and Connect to a LabOne Q Session \n", + "\n", + "Establishes the connection to the instruments and readies them for experiments" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6bb93815", + "metadata": {}, + "outputs": [], + "source": [ + "# perform 
experiments in emulation mode only? - if True, also generate dummy data for fitting\n", + "emulate = True\n", + "\n", + "# create and connect to a session\n", + "session = Session(device_setup=device_setup)\n", + "session.connect(do_emulation=emulate)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "7f026e91-dff5-4548-9ecd-fb6db4f85a50", + "metadata": { + "tags": [] + }, + "source": [ + "# 3. Qubit Tuneup - Experimental Sequence\n", + "\n", + "Sequence of experiments for tuneup from scratch of a superconducting qubit in circuit QED architecture " + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "18e4a41a-b1c7-432e-8cbf-d845da55b19a", + "metadata": { + "tags": [] + }, + "source": [ + "## 3.1 Resonator Spectroscopy: CW in parallel over full range of 0.5 - 8.5 GHz\n", + "\n", + "Find the resonance frequency of the qubit readout resonator by looking at the transmission or reflection of a probe signal applied through the readout line" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "44bed766-917f-4d01-98c8-86c6e841fc44", + "metadata": { + "tags": [] + }, + "source": [ + "### 3.1.1 Experiment Definition\n", + "\n", + "Define the experimental pulse and readout sequence - here without any explicit qubit reference\n", + "\n", + "Explicit qubit reference is then given through different experimental calibration and signal maps" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ec430d0c", + "metadata": {}, + "outputs": [], + "source": [ + "# define sweep parameter\n", + "def create_freq_sweep(id, start_freq, stop_freq, num_points):\n", + " return LinearSweepParameter(\n", + " uid=f\"frequency_sweep_{id}\",\n", + " start=start_freq,\n", + " stop=stop_freq,\n", + " count=num_points,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1eb991d0", + "metadata": {}, + "outputs": [], + "source": [ + "def resonator_spectroscopy_parallel_CW(\n", + " qubits, outer_sweep, 
inner_sweep, integration_time=10e-3, num_averages=1\n", + "):\n", + " # Create resonator spectroscopy experiment - uses only readout drive and signal acquisition\n", + " exp_spec = Experiment(\n", + " uid=\"Resonator Spectroscopy\",\n", + " signals=[\n", + " signal\n", + " for signal_list in [\n", + " [\n", + " ExperimentSignal(\n", + " f\"measure_{qubit.uid}\", map_to=qubit.signals[\"measure\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"acquire_{qubit.uid}\", map_to=qubit.signals[\"acquire\"]\n", + " ),\n", + " ]\n", + " for qubit in qubits\n", + " ]\n", + " for signal in signal_list\n", + " ],\n", + " )\n", + "\n", + " ## define experimental sequence\n", + " # loop - average multiple measurements for each frequency - measurement in spectroscopy mode\n", + " with exp_spec.sweep(uid=\"resonator_frequency_outer\", parameter=outer_sweep):\n", + " with exp_spec.acquire_loop_rt(\n", + " uid=\"shots\",\n", + " count=num_averages,\n", + " acquisition_type=AcquisitionType.SPECTROSCOPY,\n", + " ):\n", + " with exp_spec.sweep(uid=\"resonator_frequency_inner\", parameter=inner_sweep):\n", + " for qubit in qubits:\n", + " # readout pulse and data acquisition\n", + " with exp_spec.section(uid=f\"resonator_spectroscopy_{qubit.uid}\"):\n", + " # resonator signal readout\n", + " exp_spec.acquire(\n", + " signal=f\"acquire_{qubit.uid}\",\n", + " handle=f\"resonator_spectroscopy_{qubit.uid}\",\n", + " length=integration_time,\n", + " )\n", + " with exp_spec.section(uid=f\"delay_{qubit.uid}\", length=1e-6):\n", + " # holdoff time after signal acquisition\n", + " exp_spec.reserve(signal=f\"measure_{qubit.uid}\")\n", + "\n", + " cal = Calibration()\n", + " local_oscillator = Oscillator(frequency=outer_sweep)\n", + " for qubit in qubits:\n", + " cal[f\"measure_{qubit.uid}\"] = SignalCalibration(\n", + " oscillator=Oscillator(\n", + " frequency=inner_sweep,\n", + " ),\n", + " local_oscillator=local_oscillator,\n", + " range=-10,\n", + " )\n", + " cal[f\"acquire_{qubit.uid}\"] = 
SignalCalibration(\n", + " local_oscillator=local_oscillator,\n", + " range=-5,\n", + " port_delay=250e-9,\n", + " )\n", + " exp_spec.set_calibration(cal)\n", + "\n", + " return exp_spec" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "818a5d99-8982-4758-8120-446be6f0a016", + "metadata": { + "tags": [] + }, + "source": [ + "### 3.1.2 Run and Evaluate Experiment\n", + "Runs the experiment and evaluates the data returned by the measurement" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f652c916", + "metadata": {}, + "outputs": [], + "source": [ + "# create experiment with outer, near-time sweep from 1-8 GHz in 1 GHz steps and a sweep over 1001 points within each 1GHz band - in parallel for all 4 QA channels\n", + "cw_spectroscopy_exp = resonator_spectroscopy_parallel_CW(\n", + " my_qubits[::6],\n", + " create_freq_sweep(\"outer\", 1e9, 8e9, 8),\n", + " create_freq_sweep(\"inner\", -500e6, 500e6, 1001),\n", + ")\n", + "\n", + "compiled_cw_spectroscopy_exp = session.compile(cw_spectroscopy_exp)\n", + "cw_spectroscopy_results = session.run(compiled_cw_spectroscopy_exp)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f02c6582", + "metadata": {}, + "outputs": [], + "source": [ + "# save results to database\n", + "my_results_db.store(\n", + " data=cw_spectroscopy_results,\n", + " key=f\"cw_spectroscopy_results_{datetime.datetime.now()}\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d21b19c1", + "metadata": {}, + "outputs": [], + "source": [ + "# access and plot results of one 8GHz sweep\n", + "full_data = cw_spectroscopy_results.get_data(\"resonator_spectroscopy_q0\")\n", + "\n", + "outer = cw_spectroscopy_results.get_axis(\"resonator_spectroscopy_q0\")[0]\n", + "inner = cw_spectroscopy_results.get_axis(\"resonator_spectroscopy_q0\")[1]\n", + "full_sweep = np.array(\n", + " [item for item_list in [out 
+ inner for out in outer] for item in item_list]\n", + ")\n", + "\n", + "plt.plot(\n", + " full_sweep,\n", + " np.array(\n", + " [item for item_list in [data for data in full_data] for item in item_list]\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ab1198de", + "metadata": {}, + "outputs": [], + "source": [ + "# Do analysis of data here" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "d9a8dc1f", + "metadata": {}, + "source": [ + "### 3.1.3 Update Calibration and save to database " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e2b228d2", + "metadata": {}, + "outputs": [], + "source": [ + "# update qubit parameters from analysis\n", + "# for qubit in my_qubits:\n", + "# qubit.parameters.readout_resonator_frequency = my_new_frequency\n", + "# device_setup.set_calibration(qubit.calibration())\n", + "\n", + "\n", + "# store new device setup including calibration in database\n", + "my_setup_db.store(\n", + " data=device_setup,\n", + " # use same key to overwrite previous device setup or different to store individual instances\n", + " key=\"device_setup_CW\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "8a682b18-6de8-4b17-97e9-08d92ac239bd", + "metadata": { + "tags": [] + }, + "source": [ + "## 3.2 Pulsed Qubit Spectroscopy: in parallel over 100MHz range for each qubit\n", + "\n", + "Find the resonance frequency of the qubit by looking at the change in resonator transmission when sweeping the frequency of a qubit excitation pulse" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "fefd645d-ceee-41d3-a86c-403d63d5b4f1", + "metadata": {}, + "source": [ + "### 3.2.1 Experiment Definition\n", + "\n", + "The frequency sweep of the drive line can now be done in real time (was: near time in older software releases)" + ] + }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "3eb24425", + "metadata": {}, + "outputs": [], + "source": [ + "def qubit_spectroscopy_pulse(qubit):\n", + " return pulse_library.const(\n", + " uid=f\"spectroscopy_pulse_{qubit.uid}\",\n", + " length=qubit.parameters.user_defined[\"readout_length\"],\n", + " amplitude=0.8,\n", + " # can_compress=True,\n", + " )\n", + "\n", + "\n", + "def readout_pulse(qubit):\n", + " return pulse_library.const(\n", + " uid=f\"readout_pulse_{qubit.uid}\",\n", + " length=qubit.parameters.user_defined[\"readout_length\"],\n", + " amplitude=qubit.parameters.user_defined[\"readout_amplitude\"],\n", + " )\n", + "\n", + "\n", + "def integration_kernel(qubit):\n", + " return pulse_library.const(\n", + " uid=f\"integration_kernel_{qubit.uid}\",\n", + " length=qubit.parameters.user_defined[\"readout_length\"],\n", + " amplitude=1,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4106f823", + "metadata": { + "ExecuteTime": { + "end_time": "2021-09-20T12:08:26.284527Z", + "start_time": "2021-09-20T12:08:06.953858Z" + } + }, + "outputs": [], + "source": [ + "# function that returns a qubit spectroscopy experiment- accepts frequency sweep range as parameter\n", + "def qubit_spectroscopy_parallel(\n", + " qubits, qspec_range=100e6, qspec_num=1001, num_averages=2**10\n", + "):\n", + " # Create qubit spectroscopy Experiment - uses qubit drive, readout drive and data acquisition lines\n", + " exp_qspec = Experiment(\n", + " uid=\"Qubit Spectroscopy\",\n", + " signals=[\n", + " signal\n", + " for signal_list in [\n", + " [\n", + " ExperimentSignal(\n", + " f\"drive_{qubit.uid}\", map_to=qubit.signals[\"drive\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"measure_{qubit.uid}\", map_to=qubit.signals[\"measure\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"acquire_{qubit.uid}\", map_to=qubit.signals[\"acquire\"]\n", + " ),\n", + " ]\n", + " for qubit in qubits\n", + " ]\n", + " for signal in signal_list\n", + " ],\n", + " )\n", 
+ "\n", + " # List of frequency sweeps for all qubits\n", + " qubit_frequency_sweeps = [\n", + " LinearSweepParameter(\n", + " uid=f\"{qubit.uid}_spectroscopy_sweep\",\n", + " start=qubit.parameters.drive_frequency_ge - qspec_range / 2,\n", + " stop=qubit.parameters.drive_frequency_ge + qspec_range / 2,\n", + " count=qspec_num,\n", + " )\n", + " for qubit in my_qubits\n", + " ]\n", + "\n", + " # inner loop - real-time averaging - QA in integration mode\n", + " with exp_qspec.acquire_loop_rt(\n", + " uid=\"freq_shots\",\n", + " count=num_averages,\n", + " acquisition_type=AcquisitionType.INTEGRATION,\n", + " ):\n", + " with exp_qspec.sweep(\n", + " uid=\"qubit_frequency_sweep\", parameter=qubit_frequency_sweeps\n", + " ):\n", + " for qubit in qubits:\n", + " # qubit drive\n", + " with exp_qspec.section(uid=f\"{qubit.uid}_excitation\"):\n", + " exp_qspec.play(\n", + " signal=f\"drive_{qubit.uid}\",\n", + " pulse=qubit_spectroscopy_pulse(qubit),\n", + " )\n", + " # measurement\n", + " with exp_qspec.section(\n", + " uid=f\"readout_{qubit.uid}\", play_after=f\"{qubit.uid}_excitation\"\n", + " ):\n", + " exp_qspec.measure(\n", + " measure_signal=f\"measure_{qubit.uid}\",\n", + " measure_pulse=readout_pulse(qubit),\n", + " handle=f\"{qubit.uid}_spectroscopy\",\n", + " acquire_signal=f\"acquire_{qubit.uid}\",\n", + " integration_kernel=integration_kernel(qubit),\n", + " reset_delay=qubit.parameters.user_defined[\"reset_delay_length\"],\n", + " )\n", + "\n", + " cal = Calibration()\n", + " for it, qubit in enumerate(qubits):\n", + " cal[f\"drive_{qubit.uid}\"] = SignalCalibration(\n", + " oscillator=Oscillator(\n", + " frequency=qubit_frequency_sweeps[it],\n", + " modulation_type=ModulationType.HARDWARE,\n", + " )\n", + " )\n", + " exp_qspec.set_calibration(cal)\n", + "\n", + " return exp_qspec" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "46bf613c-2f03-4a02-8bc0-1201b845468a", + "metadata": {}, + "source": [ + "### 3.2.2 Run and Evaluate 
Experiment for all Qubits in parallel\n", + "\n", + "Runs the experiment and evaluates the data returned by the measurement" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "814e16f2", + "metadata": {}, + "outputs": [], + "source": [ + "qubit_spectroscopy_exp = qubit_spectroscopy_parallel(my_qubits)\n", + "\n", + "compiled_qubit_spectroscopy_exp = session.compile(qubit_spectroscopy_exp)\n", + "qubit_spectroscopy_results = session.run(compiled_qubit_spectroscopy_exp)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ebd0298e", + "metadata": {}, + "outputs": [], + "source": [ + "# save results to database\n", + "my_results_db.store(\n", + " data=qubit_spectroscopy_results,\n", + " key=f\"qubit_spectroscopy_results_{datetime.datetime.now()}\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4011a141", + "metadata": {}, + "outputs": [], + "source": [ + "# access and plot results of one drive frequency sweep\n", + "index = 0\n", + "data_qubit = my_qubits[index]\n", + "\n", + "qubit_data = qubit_spectroscopy_results.get_data(f\"{data_qubit.uid}_spectroscopy\")\n", + "qubit_freq = (\n", + " qubit_spectroscopy_results.get_axis(f\"{data_qubit.uid}_spectroscopy\")[0][0]\n", + " + qubit.parameters.drive_lo_frequency\n", + ")\n", + "\n", + "plt.plot(qubit_freq, qubit_data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6da7a30b", + "metadata": {}, + "outputs": [], + "source": [ + "# Do analysis of data here" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "a432798b", + "metadata": {}, + "source": [ + "### 3.2.3 Update Calibration and save to database " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dc46de78", + "metadata": {}, + "outputs": [], + "source": [ + "# update qubit parameters from analysis - here: qubit resonance frquency\n", + "# for qubit in 
my_qubits:\n", + "# qubit.parameters.resonance_frequency_ge = my_new_frequency\n", + "# device_setup.set_calibration(qubit.calibration())\n", + "\n", + "\n", + "# store new device setup including calibration in database\n", + "my_setup_db.store(\n", + " data=device_setup,\n", + " # use same key to overwrite previous device setup or different to store individual instances\n", + " key=\"device_setup_Qubit_Spectroscopy\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "023ba502-7f83-49b2-a208-6207d54793f4", + "metadata": { + "tags": [] + }, + "source": [ + "## 3.3 Amplitude Rabi Experiment - in parallel\n", + "\n", + "Sweep the pulse amplitude of a qubit drive pulse to determine the ideal amplitudes for specific qubit rotation angles" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "bb70a8d7-22df-4396-a167-d1ac4a5277c1", + "metadata": { + "tags": [] + }, + "source": [ + "### 3.3.1 Experiment Definition\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "609b7c27", + "metadata": {}, + "outputs": [], + "source": [ + "def drive_ge_rabi(qubit):\n", + " return pulse_library.drag(\n", + " uid=f\"drag_pulse_{qubit.uid}\",\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " sigma=0.4,\n", + " beta=0.2,\n", + " amplitude=1,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9bab6736", + "metadata": { + "ExecuteTime": { + "end_time": "2021-09-20T12:14:39.604236Z", + "start_time": "2021-09-20T12:14:27.154245Z" + } + }, + "outputs": [], + "source": [ + "# function that returns an amplitude Rabi experiment\n", + "def amplitude_rabi_parallel(qubits, amplitude_sweep, num_averages=2**10):\n", + " exp_rabi = Experiment(\n", + " uid=\"Qubit Spectroscopy\",\n", + " signals=[\n", + " signal\n", + " for signal_list in [\n", + " [\n", + " ExperimentSignal(\n", + " f\"drive_{qubit.uid}\", 
map_to=qubit.signals[\"drive\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"measure_{qubit.uid}\", map_to=qubit.signals[\"measure\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"acquire_{qubit.uid}\", map_to=qubit.signals[\"acquire\"]\n", + " ),\n", + " ]\n", + " for qubit in qubits\n", + " ]\n", + " for signal in signal_list\n", + " ],\n", + " )\n", + "\n", + " ## define Rabi experiment pulse sequence\n", + " # outer loop - real-time, cyclic averaging\n", + " with exp_rabi.acquire_loop_rt(\n", + " uid=\"rabi_shots\",\n", + " count=num_averages,\n", + " averaging_mode=AveragingMode.CYCLIC,\n", + " acquisition_type=AcquisitionType.INTEGRATION,\n", + " ):\n", + " # inner loop - real time sweep of Rabi ampitudes\n", + " with exp_rabi.sweep(uid=\"rabi_sweep\", parameter=amplitude_sweep):\n", + " for qubit in qubits:\n", + " # qubit drive\n", + " with exp_rabi.section(\n", + " uid=f\"{qubit.uid}_excitation\", alignment=SectionAlignment.RIGHT\n", + " ):\n", + " exp_rabi.play(\n", + " signal=f\"drive_{qubit.uid}\",\n", + " pulse=drive_ge_rabi(qubit),\n", + " amplitude=amplitude_sweep,\n", + " )\n", + " # measurement\n", + " with exp_rabi.section(\n", + " uid=f\"readout_{qubit.uid}\", play_after=f\"{qubit.uid}_excitation\"\n", + " ):\n", + " exp_rabi.measure(\n", + " measure_signal=f\"measure_{qubit.uid}\",\n", + " measure_pulse=readout_pulse(qubit),\n", + " handle=f\"{qubit.uid}_rabi\",\n", + " acquire_signal=f\"acquire_{qubit.uid}\",\n", + " integration_kernel=integration_kernel(qubit),\n", + " reset_delay=qubit.parameters.user_defined[\"reset_delay_length\"],\n", + " )\n", + "\n", + " return exp_rabi" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "1dc2ff96-f8ff-4de5-b62a-872108c7814f", + "metadata": { + "tags": [] + }, + "source": [ + "### 3.3.2 Execute experiment and analyze results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "adac1fa4", + "metadata": {}, + "outputs": [], + "source": [ + "rabi_exp = 
amplitude_rabi_parallel(\n", + " my_qubits,\n", + " LinearSweepParameter(uid=\"amplitude_sweep\", start=0, stop=1, count=201),\n", + ")\n", + "\n", + "compiled_rabi_exp = session.compile(rabi_exp)\n", + "rabi_results = session.run(compiled_rabi_exp)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "108ce4b8", + "metadata": {}, + "outputs": [], + "source": [ + "# save results to database\n", + "my_results_db.store(\n", + " data=rabi_results,\n", + " key=f\"rabi_results_{datetime.datetime.now()}\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "de982d69", + "metadata": {}, + "outputs": [], + "source": [ + "# access and plot results of one drive frequency sweep\n", + "index = 0\n", + "data_qubit = my_qubits[index]\n", + "\n", + "qubit_data = rabi_results.get_data(f\"{data_qubit.uid}_rabi\")\n", + "qubit_amp = rabi_results.get_axis(f\"{data_qubit.uid}_rabi\")[0]\n", + "\n", + "plt.plot(qubit_amp, qubit_data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c534d842", + "metadata": {}, + "outputs": [], + "source": [ + "# plot all results\n", + "plot_results(rabi_results)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1c59a2d4", + "metadata": {}, + "outputs": [], + "source": [ + "# Do analysis of data here" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "ba9b96a6", + "metadata": {}, + "source": [ + "### 3.3.3 Update Calibration and save to database " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "989c129a", + "metadata": {}, + "outputs": [], + "source": [ + "# update qubit parameters from analysis - here: qubit pulse amplitude\n", + "# for qubit in my_qubits:\n", + "# qubit.parameters.user_defined[\"amplitude_pi\"] = my_amplitude\n", + "\n", + "# store qubits including their parameters in database\n", + "for qubit in my_qubits:\n", + " 
my_setup_db.store(\n", + " data=qubit,\n", + " # use same key to overwrite previous device setup or different to store individual instances\n", + " key=f\"qubit_{qubit.uid}\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + " )" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "71848b75", + "metadata": {}, + "source": [ + "## 3.4 Ramsey Experiment - in parallel\n", + "The Ramsey experiment is different from the experiments above as the length of the drive section changes. Using a right-aligned sweep section and the automatic repetition time makes sure that the experiment is run as efficiently as possible on the Zurich Instruments hardware." + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "7fa365d3", + "metadata": {}, + "source": [ + "### 3.4.1 Experiment Definition" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4585a1b0", + "metadata": {}, + "outputs": [], + "source": [ + "# define ramsey drive pulse - use calibration from Rabi experiment\n", + "def drive_ge_pi_half(qubit):\n", + " return pulse_library.drag(\n", + " uid=f\"ramsey_drive_{qubit.uid}\",\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " sigma=0.4,\n", + " beta=0.2,\n", + " amplitude=qubit.parameters.user_defined[\"amplitude_pi\"] / 2,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f8d1e77b", + "metadata": {}, + "outputs": [], + "source": [ + "# function that returns an amplitude Rabi experiment\n", + "def ramsey_parallel(qubits, delay_sweep, num_averages=2**10):\n", + " exp_ramsey = Experiment(\n", + " uid=\"Qubit Spectroscopy\",\n", + " signals=[\n", + " signal\n", + " for signal_list in [\n", + " [\n", + " ExperimentSignal(\n", + " f\"drive_{qubit.uid}\", map_to=qubit.signals[\"drive\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"measure_{qubit.uid}\", map_to=qubit.signals[\"measure\"]\n", + " ),\n", + " ExperimentSignal(\n", + " 
f\"acquire_{qubit.uid}\", map_to=qubit.signals[\"acquire\"]\n", + " ),\n", + " ]\n", + " for qubit in qubits\n", + " ]\n", + " for signal in signal_list\n", + " ],\n", + " )\n", + "\n", + " ## define Ramsey experiment pulse sequence\n", + " # outer loop - real-time, cyclic averaging\n", + " with exp_ramsey.acquire_loop_rt(\n", + " uid=\"ramsey_shots\",\n", + " count=num_averages,\n", + " averaging_mode=AveragingMode.CYCLIC,\n", + " acquisition_type=AcquisitionType.INTEGRATION,\n", + " repetition_mode=RepetitionMode.AUTO,\n", + " ):\n", + " # inner loop - real time sweep of Ramsey time delays\n", + " with exp_ramsey.sweep(\n", + " uid=\"ramsey_sweep\", parameter=delay_sweep, alignment=SectionAlignment.RIGHT\n", + " ):\n", + " for qubit in qubits:\n", + " # play qubit excitation pulse - pulse amplitude is swept\n", + " ramsey_pulse = drive_ge_pi_half(qubit)\n", + " with exp_ramsey.section(\n", + " uid=f\"{qubit.uid}_excitation\", alignment=SectionAlignment.RIGHT\n", + " ):\n", + " exp_ramsey.play(signal=f\"drive_{qubit.uid}\", pulse=ramsey_pulse)\n", + " exp_ramsey.delay(signal=f\"drive_{qubit.uid}\", time=delay_sweep)\n", + " exp_ramsey.play(signal=f\"drive_{qubit.uid}\", pulse=ramsey_pulse)\n", + " # readout pulse and data acquisition\n", + " # measurement\n", + " with exp_ramsey.section(\n", + " uid=f\"readout_{qubit.uid}\", play_after=f\"{qubit.uid}_excitation\"\n", + " ):\n", + " exp_ramsey.measure(\n", + " measure_signal=f\"measure_{qubit.uid}\",\n", + " measure_pulse=readout_pulse(qubit),\n", + " handle=f\"{qubit.uid}_ramsey\",\n", + " acquire_signal=f\"acquire_{qubit.uid}\",\n", + " integration_kernel=integration_kernel(qubit),\n", + " reset_delay=qubit.parameters.user_defined[\"reset_delay_length\"],\n", + " )\n", + "\n", + " return exp_ramsey" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "45f3ecb7", + "metadata": {}, + "source": [ + "### 3.4.2 Execute experiment" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"0c86a901", + "metadata": {}, + "outputs": [], + "source": [ + "ramsey_exp = ramsey_parallel(\n", + " my_qubits,\n", + " LinearSweepParameter(\n", + " uid=\"ramsey_delay_sweep\",\n", + " start=0,\n", + " stop=15e-6,\n", + " count=201,\n", + " ),\n", + ")\n", + "\n", + "compiled_ramsey_exp = session.compile(ramsey_exp)\n", + "ramsey_results = session.run(compiled_ramsey_exp)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "720dced2", + "metadata": {}, + "outputs": [], + "source": [ + "# save results to database\n", + "my_results_db.store(\n", + " data=ramsey_results,\n", + " key=\"ramsey_results\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "83051cf1", + "metadata": {}, + "outputs": [], + "source": [ + "# access and plot results of one drive frequency sweep\n", + "index = 0\n", + "data_qubit = my_qubits[index]\n", + "\n", + "qubit_data = ramsey_results.get_data(f\"{data_qubit.uid}_ramsey\")\n", + "qubit_delay = ramsey_results.get_axis(f\"{data_qubit.uid}_ramsey\")[0]\n", + "\n", + "plt.plot(qubit_delay, qubit_data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4eaa8ada", + "metadata": {}, + "outputs": [], + "source": [ + "# plot all results\n", + "plot_results(ramsey_results)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35de4023", + "metadata": {}, + "outputs": [], + "source": [ + "# Do analysis of data here" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "3d0ef303", + "metadata": {}, + "source": [ + "### 3.4.3 Update Qubit parameters and save to database " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "aa4e65a0", + "metadata": {}, + "outputs": [], + "source": [ + "# update qubit parameters from analysis - here: qubit dephasing time\n", + "# for qubit in my_qubits:\n", + "# qubit.parameters.user_defined[\"t2_time\"] = my_t2\n", + "\n", + "# 
store qubits including their parameters in database\n", + "for qubit in my_qubits:\n", + " my_setup_db.store(\n", + " data=qubit,\n", + " # use same key to overwrite previous device setup or different to store individual instances\n", + " key=f\"qubit_{qubit.uid}\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + " )" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "fbcaa9fc", + "metadata": {}, + "source": [ + "## 3.5 T1 Experiment - in parallel\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "23c1610f", + "metadata": {}, + "source": [ + "### 3.5.1 Experiment Definition" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c0397dc2", + "metadata": {}, + "outputs": [], + "source": [ + "# define drive pulse - use calibration from Rabi experiment\n", + "def drive_ge_pi(qubit):\n", + " return pulse_library.drag(\n", + " uid=f\"drive_{qubit.uid}\",\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " sigma=0.4,\n", + " beta=0.2,\n", + " amplitude=qubit.parameters.user_defined[\"amplitude_pi\"],\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5e6139ac", + "metadata": {}, + "outputs": [], + "source": [ + "# function that returns an amplitude Rabi experiment\n", + "def t1_parallel(qubits, delay_sweep, num_averages=2**10):\n", + " exp_t1 = Experiment(\n", + " uid=\"Qubit Spectroscopy\",\n", + " signals=[\n", + " signal\n", + " for signal_list in [\n", + " [\n", + " ExperimentSignal(\n", + " f\"drive_{qubit.uid}\", map_to=qubit.signals[\"drive\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"measure_{qubit.uid}\", map_to=qubit.signals[\"measure\"]\n", + " ),\n", + " ExperimentSignal(\n", + " f\"acquire_{qubit.uid}\", map_to=qubit.signals[\"acquire\"]\n", + " ),\n", + " ]\n", + " for qubit in qubits\n", + " ]\n", + " for signal in signal_list\n", + " ],\n", + " )\n", + "\n", + " ## define Ramsey experiment pulse sequence\n", + " # outer 
loop - real-time, cyclic averaging\n", + " with exp_t1.acquire_loop_rt(\n", + " uid=\"t1_shots\",\n", + " count=num_averages,\n", + " averaging_mode=AveragingMode.CYCLIC,\n", + " acquisition_type=AcquisitionType.INTEGRATION,\n", + " repetition_mode=RepetitionMode.AUTO,\n", + " ):\n", + " # inner loop - real time sweep of Ramsey time delays\n", + " with exp_t1.sweep(\n", + " uid=\"t1_delay_sweep\",\n", + " parameter=delay_sweep,\n", + " alignment=SectionAlignment.RIGHT,\n", + " ):\n", + " for qubit in qubits:\n", + " # play qubit excitation pulse - pulse amplitude is swept\n", + " with exp_t1.section(\n", + " uid=f\"{qubit.uid}_excitation\", alignment=SectionAlignment.RIGHT\n", + " ):\n", + " exp_t1.play(signal=f\"drive_{qubit.uid}\", pulse=drive_ge_pi(qubit))\n", + " exp_t1.delay(signal=f\"drive_{qubit.uid}\", time=delay_sweep)\n", + " # readout pulse and data acquisition\n", + " # measurement\n", + " with exp_t1.section(\n", + " uid=f\"readout_{qubit.uid}\", play_after=f\"{qubit.uid}_excitation\"\n", + " ):\n", + " exp_t1.measure(\n", + " measure_signal=f\"measure_{qubit.uid}\",\n", + " measure_pulse=readout_pulse(qubit),\n", + " handle=f\"{qubit.uid}_t1\",\n", + " acquire_signal=f\"acquire_{qubit.uid}\",\n", + " integration_kernel=integration_kernel(qubit),\n", + " reset_delay=qubit.parameters.user_defined[\"reset_delay_length\"],\n", + " )\n", + "\n", + " return exp_t1" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "59992477", + "metadata": {}, + "source": [ + "### 3.5.2 Execute experiment" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d43a6571", + "metadata": {}, + "outputs": [], + "source": [ + "t1_exp = t1_parallel(\n", + " my_qubits,\n", + " LinearSweepParameter(uid=\"t1_delay_sweep\", start=0, stop=50e-6, count=201),\n", + ")\n", + "\n", + "compiled_t1_exp = session.compile(t1_exp)\n", + "t1_results = session.run(compiled_t1_exp)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"41cd4c3f", + "metadata": {}, + "outputs": [], + "source": [ + "# save results to database\n", + "my_results_db.store(\n", + " data=t1_results,\n", + " key=f\"t1_results_{datetime.datetime.now()}\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9d71ce3d", + "metadata": {}, + "outputs": [], + "source": [ + "# access and plot results of one drive frequency sweep\n", + "index = 0\n", + "data_qubit = my_qubits[index]\n", + "\n", + "qubit_data = t1_results.get_data(f\"{data_qubit.uid}_t1\")\n", + "qubit_delay = t1_results.get_axis(f\"{data_qubit.uid}_t1\")[0]\n", + "\n", + "plt.plot(qubit_delay, qubit_data)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3be9a88", + "metadata": {}, + "outputs": [], + "source": [ + "# plot all results\n", + "plot_results(t1_results)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "687865e3", + "metadata": {}, + "outputs": [], + "source": [ + "# Do analysis of data here" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "688da0dc", + "metadata": {}, + "source": [ + "### 3.5.3 Update Qubit parameters and save to database " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d869fbe0", + "metadata": {}, + "outputs": [], + "source": [ + "# update qubit parameters from analysis - here: qubit relaxation time\n", + "# for qubit in my_qubits:\n", + "# qubit.parameters.user_defined[\"t1_time\"] = my_t1\n", + "\n", + "# store qubits including their parameters in database\n", + "for qubit in my_qubits:\n", + " my_setup_db.store(\n", + " data=qubit,\n", + " # use same key to overwrite previous device setup or different to store individual instances\n", + " key=f\"qubit_{qubit.uid}\",\n", + " metadata={\"creation_date\": datetime.datetime.now()},\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1fe173ed", + "metadata": {}, + 
"outputs": [], + "source": [] + } + ], + "metadata": { + "hide_input": false, + "kernelspec": { + "display_name": "2.9", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + }, + "toc": { + "base_numbering": 1, + "nav_menu": {}, + "number_sections": true, + "sideBar": true, + "skip_h1_title": false, + "title_cell": "Table of Contents", + "title_sidebar": "Contents", + "toc_cell": false, + "toc_position": { + "height": "calc(100% - 180px)", + "left": "10px", + "top": "150px", + "width": "409.6px" + }, + "toc_section_display": true, + "toc_window_display": true + }, + "varInspector": { + "cols": { + "lenName": 16, + "lenType": 16, + "lenVar": 40 + }, + "kernels_config": { + "python": { + "delete_cmd_postfix": "", + "delete_cmd_prefix": "del ", + "library": "var_list.py", + "varRefreshCmd": "print(var_dic_list())" + }, + "r": { + "delete_cmd_postfix": ") ", + "delete_cmd_prefix": "rm(", + "library": "var_list.r", + "varRefreshCmd": "cat(var_dic_list()) " + } + }, + "types_to_exclude": [ + "module", + "function", + "builtin_function_or_method", + "instance", + "_Feature" + ], + "window_display": false + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/04_spin_qubits/00_user_function_sweeps.ipynb b/examples/04_spin_qubits/00_user_function_sweeps.ipynb index f5af2b5..147d2a4 100644 --- a/examples/04_spin_qubits/00_user_function_sweeps.ipynb +++ b/examples/04_spin_qubits/00_user_function_sweeps.ipynb @@ -6,7 +6,7 @@ "id": "46cfbe23", "metadata": {}, "source": [ - "# Sweeps with Callback Functions in LabOne Q" + "# Sweeps with Callback Functions" ] }, { @@ -83,7 +83,7 @@ " server_host=\"your_ip_address\",\n", " server_port=8004,\n", " setup_name=\"MySetup\",\n", - ")\n" + ")" ] }, { @@ -112,7 +112,7 @@ 
"source": [ "# create and connect to session\n", "session = Session(device_setup=device_setup)\n", - "session.connect(do_emulation=True)\n" + "session.connect(do_emulation=True)" ] }, { @@ -152,7 +152,7 @@ " with exp.sweep(uid=\"inner_sweep\", parameter=inner_sweep_parameter):\n", " # use user function\n", " exp.call(\"setFastAxis\", param=inner_sweep_parameter)\n", - " exp.call(\"readMFLI\", settling_time=0.1)\n" + " exp.call(\"readMFLI\", settling_time=0.1)" ] }, { @@ -226,7 +226,7 @@ "# print(f\"Columns: {daq_module.grid.cols()}\")\n", "# print(f\"Rows: {daq_module.grid.rows()}\")\n", "# print(f\"Repetitions: {daq_module.grid.repetitions()}\")\n", - "# print(f\"Holdoff: {daq_module.holdoff.time()}\")\n" + "# print(f\"Holdoff: {daq_module.holdoff.time()}\")" ] }, { @@ -282,7 +282,7 @@ "\n", "\n", "def setFastAxis(session, param):\n", - " print(f\"inner: {int(param)}\")\n" + " print(f\"inner: {int(param)}\")" ] }, { @@ -302,7 +302,7 @@ "source": [ "session.register_user_function(setSlowAxis, \"setSlowAxis\")\n", "session.register_user_function(setFastAxis, \"setFastAxis\")\n", - "session.register_user_function(readMFLI, \"readMFLI\")\n" + "session.register_user_function(readMFLI, \"readMFLI\")" ] }, { @@ -318,7 +318,7 @@ "metadata": {}, "outputs": [], "source": [ - "my_results = session.run(exp)\n" + "my_results = session.run(exp)" ] }, { @@ -348,7 +348,7 @@ " plt.legend(loc=\"best\", fontsize=8)\n", " plt.title(\"MFLI time traces of demodulated data\")\n", "else:\n", - " print(\"Emulation - nothing to plot\")\n" + " print(\"Emulation - nothing to plot\")" ] } ], diff --git a/examples/05_color_centers/00_shfsg_basic_experiments.ipynb b/examples/05_color_centers/00_shfsg_basic_experiments.ipynb index aafa63f..d97dfa0 100644 --- a/examples/05_color_centers/00_shfsg_basic_experiments.ipynb +++ b/examples/05_color_centers/00_shfsg_basic_experiments.ipynb @@ -6,7 +6,7 @@ "id": "d27f7f90", "metadata": {}, "source": [ - "# Color Centers: Basic Experiments" + "# Color 
Centers - Basic Experiments" ] }, { @@ -104,7 +104,7 @@ " \"lo_shfsg_1_2\",\n", " frequency=2.9e9,\n", " )\n", - " \n", + "\n", " lo_shfsg_3 = Oscillator(\n", " \"lo_shfsg_3\",\n", " frequency=0,\n", @@ -121,7 +121,7 @@ " local_oscillator=lo_shfsg_1_2,\n", " range=10,\n", " )\n", - " \n", + "\n", " device_setup.logical_signal_groups[\"q0\"].logical_signals[\n", " \"drive_AOM_line\"\n", " ].calibration = SignalCalibration(\n", @@ -133,7 +133,7 @@ " port_mode=PortMode.LF,\n", " local_oscillator=lo_shfsg_3,\n", " range=5,\n", - " )\n" + " )" ] }, { @@ -182,9 +182,11 @@ " ports: SGCHANNELS/2/OUTPUT\n", "\"\"\"\n", "\n", - "def create_device_setup(descriptor=descriptor_shfsg_nv,\n", - " serverhost=\"localhost\",\n", - " ):\n", + "\n", + "def create_device_setup(\n", + " descriptor=descriptor_shfsg_nv,\n", + " serverhost=\"localhost\",\n", + "):\n", " device_setup = DeviceSetup.from_descriptor(\n", " yaml_text=descriptor,\n", " server_host=serverhost,\n", @@ -192,7 +194,7 @@ " setup_name=\"SHFSG_Standalone\",\n", " )\n", " calibrate_devices(device_setup)\n", - " return device_setup\n" + " return device_setup" ] }, { @@ -263,7 +265,7 @@ "\n", "# trigger\n", "Trigger_Pulse_length = 250e-9\n", - "AOM_Pulse_length = 3e-6 + Trigger_Pulse_length" + "AOM_Pulse_length = 3e-6 + Trigger_Pulse_length" ] }, { @@ -302,7 +304,7 @@ "n_average = 10\n", "\n", "# square pulse to excite the qubit\n", - "pulse_length=500e-9\n", + "pulse_length = 500e-9\n", "square_pulse = pulse_library.const(uid=\"const_iq\", length=pulse_length, amplitude=0.89)" ] }, @@ -342,33 +344,37 @@ " ):\n", " # qubit drive\n", " with exp_nv.sweep(uid=\"qfreq_sweep\", parameter=freq_sweep):\n", - " with exp_nv.section(\n", - " uid=\"parent_section\", alignment=SectionAlignment.LEFT\n", - " ):\n", + " with exp_nv.section(uid=\"parent_section\", alignment=SectionAlignment.LEFT):\n", " # shine the laser. 
Here, the AOM line is used only to send a trigger to the laser\n", - " with exp_nv.section(uid=\"excitation\", length=AOM_Pulse_length, trigger={\"AOM\" : {\"state\" : 1}}) as AOM:\n", - " exp_nv.reserve(signal=\"AOM\") \n", - " \n", + " with exp_nv.section(\n", + " uid=\"excitation\",\n", + " length=AOM_Pulse_length,\n", + " trigger={\"AOM\": {\"state\": 1}},\n", + " ) as AOM:\n", + " exp_nv.reserve(signal=\"AOM\")\n", + "\n", " # excite the state using the drive line\n", - " with exp_nv.section(uid=\"manipulation\",\n", - " play_after=\"excitation\"):\n", + " with exp_nv.section(uid=\"manipulation\", play_after=\"excitation\"):\n", " exp_nv.reserve(signal=\"AOM\")\n", " exp_nv.play(signal=\"drive\", pulse=square_pulse)\n", - " \n", + "\n", " # shine laser again\n", " exp_nv.add(AOM)\n", - " \n", + "\n", " # start DAQ trigger\n", - " with exp_nv.section(uid=\"trigger\", length=Trigger_Pulse_length, \n", - " play_after=\"manipulation\", \n", - " trigger={\"drive\" : {\"state\" : 1}}):\n", + " with exp_nv.section(\n", + " uid=\"trigger\",\n", + " length=Trigger_Pulse_length,\n", + " play_after=\"manipulation\",\n", + " trigger={\"drive\": {\"state\": 1}},\n", + " ):\n", " exp_nv.reserve(signal=\"drive\")\n", - " \n", + "\n", " # delay next average\n", " with exp_nv.section(uid=\"delay\"):\n", " exp_nv.reserve(signal=\"AOM\")\n", " exp_nv.delay(signal=\"drive\", time=1e-6)\n", - " return exp_nv\n" + " return exp_nv" ] }, { @@ -408,7 +414,7 @@ "q0_map = {\n", " \"drive\": q0[\"drive_line\"],\n", " \"AOM\": q0[\"drive_AOM_line\"],\n", - "}\n" + "}" ] }, { @@ -472,7 +478,7 @@ "metadata": {}, "outputs": [], "source": [ - "plot_simulation(compiled_qspec, 0, length=50e-6, plot_width=25)\n" + "plot_simulation(compiled_qspec, 0, length=50e-6, plot_width=25)" ] }, { @@ -526,18 +532,17 @@ "source": [ "## define length sweep parameter\n", "length_sweep_parameter = LinearSweepParameter(\n", - " uid=\"length_sweep\", \n", - " start=0,\n", - " stop=3e-6,\n", - " count=20\n", + " 
uid=\"length_sweep\", start=0, stop=3e-6, count=20\n", ")\n", "\n", "drive_pulse = pulse_library.const(\n", - " uid=\"rabi_drive_pulse\", length=100e-9, amplitude=1,\n", - " can_compress=True, # <-- pulse can be compressed by the compiler!\n", + " uid=\"rabi_drive_pulse\",\n", + " length=100e-9,\n", + " amplitude=1,\n", + " can_compress=True, # <-- pulse can be compressed by the compiler!\n", ")\n", "\n", - "n_average=10\n" + "n_average = 10" ] }, { @@ -569,40 +574,40 @@ "\n", "# define experiment\n", "with exp_nv_rabi.acquire_loop_rt(\n", - " uid=\"shots\", \n", - " count=pow(2, n_average),\n", - " averaging_mode=AveragingMode.CYCLIC\n", + " uid=\"shots\", count=pow(2, n_average), averaging_mode=AveragingMode.CYCLIC\n", "):\n", " with exp_nv_rabi.sweep(parameter=length_sweep_parameter):\n", - " \n", - " \n", " with exp_nv_rabi.section(\n", - " uid=\"excitation\", length=AOM_Pulse_length, trigger={\"AOM\" : {\"state\" : 1}}\n", + " uid=\"excitation\", length=AOM_Pulse_length, trigger={\"AOM\": {\"state\": 1}}\n", " ) as AOM:\n", " exp_nv_rabi.reserve(signal=\"AOM\")\n", - " \n", + "\n", " # sweep length of the pulse used for manipulation\n", " with exp_nv_rabi.section(\n", - " uid=\"manipulation\", alignment=SectionAlignment.LEFT,\n", + " uid=\"manipulation\",\n", + " alignment=SectionAlignment.LEFT,\n", " play_after=\"excitation\",\n", " ):\n", - " exp_nv_rabi.reserve(signal=\"AOM\") \n", - " exp_nv_rabi.play(signal=\"drive\", pulse=drive_pulse, \n", - " length=length_sweep_parameter, # <--- sweep parameter overloads the length!\n", - " )\n", - " \n", - " \n", + " exp_nv_rabi.reserve(signal=\"AOM\")\n", + " exp_nv_rabi.play(\n", + " signal=\"drive\",\n", + " pulse=drive_pulse,\n", + " length=length_sweep_parameter, # <--- sweep parameter overloads the length!\n", + " )\n", + "\n", " exp_nv_rabi.add(AOM)\n", - " \n", - " \n", + "\n", " # other way to manipulate the trigger: markers completely synchronized with a waveform!\n", - " with 
exp_nv_rabi.section(uid=\"trigger\", play_after=\"manipulation\"): \n", - " exp_nv_rabi.play(signal=\"drive\", pulse=None, amplitude=0.01, \n", - " marker={\"marker1\" : { \"start\" : 0, \"length\" : Trigger_Pulse_length}}) #<----- Markers are used here instead\n", - " \n", - " \n", + " with exp_nv_rabi.section(uid=\"trigger\", play_after=\"manipulation\"):\n", + " exp_nv_rabi.play(\n", + " signal=\"drive\",\n", + " pulse=None,\n", + " amplitude=0.01,\n", + " marker={\"marker1\": {\"start\": 0, \"length\": Trigger_Pulse_length}},\n", + " ) # <----- Markers are used here instead\n", + "\n", " with exp_nv_rabi.section(uid=\"delay\"):\n", - " exp_nv_rabi.delay(signal=\"AOM\", time=3e-6)\n" + " exp_nv_rabi.delay(signal=\"AOM\", time=3e-6)" ] }, { @@ -625,7 +630,7 @@ "map_q0 = {\n", " \"drive\": q0[\"drive_line\"],\n", " \"AOM\": q0[\"drive_AOM_line\"],\n", - "}\n" + "}" ] }, { @@ -648,7 +653,7 @@ "exp_nv_rabi.set_signal_map(map_q0)\n", "\n", "compiled_rabi = my_session.compile(exp_nv_rabi)\n", - "rabi_results = my_session.run(compiled_rabi)\n" + "rabi_results = my_session.run(compiled_rabi)" ] }, { @@ -667,7 +672,7 @@ "metadata": {}, "outputs": [], "source": [ - "plot_simulation(compiled_rabi, length=160e-6, plot_width=25)\n" + "plot_simulation(compiled_rabi, length=160e-6, plot_width=25)" ] }, { @@ -735,7 +740,7 @@ " ExperimentSignal(\"drive\"),\n", " ExperimentSignal(\"AOM\"),\n", " ],\n", - " ) \n", + " )\n", "\n", " # Connect experiment signals to logical signals\n", " exp.map_signal(\"drive\", lsg[\"drive_line\"])\n", @@ -758,14 +763,11 @@ " parameter=sweep_parameter,\n", " reset_oscillator_phase=False,\n", " ):\n", - " \n", " with exp.section(\n", - " uid=\"excitation\",\n", - " length=AOM_Pulse_length,\n", - " trigger={\"AOM\" : {\"state\" : 1}}\n", + " uid=\"excitation\", length=AOM_Pulse_length, trigger={\"AOM\": {\"state\": 1}}\n", " ) as AOM:\n", " exp.reserve(signal=\"AOM\")\n", - " \n", + "\n", " with exp.section(\n", " uid=\"manipulation\",\n", " 
length=stop + 2 * x90.length,\n", @@ -777,16 +779,17 @@ " exp.play(signal=\"drive\", pulse=x90)\n", " exp.delay(signal=\"drive\", time=sweep_parameter)\n", " exp.play(signal=\"drive\", pulse=x90)\n", - " \n", - " \n", + "\n", " exp.add(AOM)\n", - " \n", - " \n", - " with exp.section(uid=\"DAQ trigger\", length=Trigger_Pulse_length,\n", - " trigger={\"drive\" : {\"state\" : 1}}):\n", + "\n", + " with exp.section(\n", + " uid=\"DAQ trigger\",\n", + " length=Trigger_Pulse_length,\n", + " trigger={\"drive\": {\"state\": 1}},\n", + " ):\n", " exp.reserve(signal=\"drive\")\n", - " \n", - " return exp\n" + "\n", + " return exp" ] }, { @@ -810,7 +813,7 @@ "\n", "compiled_ramsey = my_session.compile(exp_nv_ramsey)\n", "# Run without a specified experiment to use compiled experiment with the compiler settings:\n", - "ramsey_results = my_session.run(compiled_ramsey)\n" + "ramsey_results = my_session.run(compiled_ramsey)" ] }, { @@ -829,7 +832,7 @@ "metadata": {}, "outputs": [], "source": [ - "plot_simulation(compiled_ramsey, 0e-7, 100e-6, plot_width=25)\n" + "plot_simulation(compiled_ramsey, 0e-7, 100e-6, plot_width=25)" ] }, { diff --git a/examples/06_qasm/01_VQE_Qiskit.ipynb b/examples/06_qasm/01_VQE_Qiskit.ipynb index af8f3e9..15137a8 100755 --- a/examples/06_qasm/01_VQE_Qiskit.ipynb +++ b/examples/06_qasm/01_VQE_Qiskit.ipynb @@ -58,7 +58,7 @@ "from qiskit import QuantumCircuit\n", "from qiskit.circuit.library import TwoLocal\n", "from qiskit.quantum_info import SparsePauliOp\n", - "from qiskit.circuit.classicalregister import ClassicalRegister\n" + "from qiskit.circuit.classicalregister import ClassicalRegister" ] }, { @@ -84,7 +84,7 @@ " server_host=\"my_ip_address\", # ip address of the LabOne dataserver used to communicate with the instruments\n", " server_port=\"8004\", # port number of the dataserver - default is 8004\n", " setup_name=\"QCCS_syste\", # setup name\n", - ")\n" + ")" ] }, { @@ -94,15 +94,16 @@ "metadata": {}, "outputs": [], "source": [ - "q0 = 
Qubit.from_logical_signal_group(\n", + "q0 = Transmon.from_logical_signal_group(\n", " \"q0\",\n", " lsg=device_setup.logical_signal_groups[\"q0\"],\n", - " parameters=QubitParameters(\n", - " res_frequency=6.15e9,\n", - " lo_frequency=6.1e9,\n", - " readout_res_frequency=6.4e9,\n", - " readout_lo_frequency=6.35e9,\n", - " user_defs={\n", + " parameters=TransmonParameters(\n", + " resonance_frequency_ge=6.15e9,\n", + " resonance_frequency_ef=5.85e9,\n", + " drive_lo_frequency=6.1e9,\n", + " readout_resonator_frequency=6.4e9,\n", + " readout_lo_frequency=6.3e9,\n", + " user_defined={\n", " \"amplitude_pi\": 0.5,\n", " \"pulse_length\": 50e-9,\n", " \"readout_len\": 5e-7,\n", @@ -112,15 +113,16 @@ " ),\n", ")\n", "\n", - "q1 = Qubit.from_logical_signal_group(\n", + "q1 = Transmon.from_logical_signal_group(\n", " \"q1\",\n", " lsg=device_setup.logical_signal_groups[\"q1\"],\n", - " parameters=QubitParameters(\n", - " res_frequency=6.25e9,\n", - " lo_frequency=6.1e9,\n", - " readout_res_frequency=6.4e9,\n", - " readout_lo_frequency=6.35e9,\n", - " user_defs={\n", + " parameters=TransmonParameters(\n", + " resonance_frequency_ge=6.25e9,\n", + " resonance_frequency_ef=5.95e9,\n", + " drive_lo_frequency=6.1e9,\n", + " readout_resonator_frequency=6.4e9,\n", + " readout_lo_frequency=6.3e9,\n", + " user_defined={\n", " \"amplitude_pi\": 0.6,\n", " \"pulse_length\": 50e-9,\n", " \"readout_len\": 5e-7,\n", @@ -132,16 +134,7 @@ "\n", "qubits = [q0, q1]\n", "for qubit in qubits:\n", - " device_setup.set_calibration(qubit.calibration())\n", - "\n", - "# set local oscillator - not yet part of qubit calibration\n", - "drive_lo = Oscillator(frequency=q0.parameters.lo_frequency)\n", - "device_setup.logical_signal_groups[\"q0\"].logical_signals[\n", - " \"drive_line\"\n", - "].local_oscillator = drive_lo\n", - "device_setup.logical_signal_groups[\"q1\"].logical_signals[\n", - " \"drive_line\"\n", - "].local_oscillator = drive_lo\n" + " 
device_setup.set_calibration(qubit.calibration())" ] }, { @@ -173,9 +166,9 @@ " \"\"\"\n", " return pulse_library.drag(\n", " uid=f\"{qubit.uid}_{label}\",\n", - " length=qubit.parameters.user_defs[\"pulse_length\"],\n", - " amplitude=qubit.parameters.user_defs[\"amplitude_pi\"],\n", - " )\n" + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " amplitude=qubit.parameters.user_defined[\"amplitude_pi\"],\n", + " )" ] }, { @@ -198,20 +191,20 @@ " Theta is in radians - pulse amplitude is adjusted according to the chosen angle\n", " \"\"\"\n", " gate = Section(uid=id_generator(f\"p_{qubit.uid}_ry_{int(180 * angle / np.pi)}\"))\n", - " amplitude = qubit.parameters.user_defs[\"amplitude_pi\"] * angle / np.pi\n", + " amplitude = qubit.parameters.user_defined[\"amplitude_pi\"] * angle / np.pi\n", " gate.play(\n", " signal=qubit.signals[\"drive\"],\n", " pulse=drive_pulse(\n", " qubit,\n", " \"ry\",\n", - " length=qubit.parameters.user_defs[\"pulse_length\"],\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", " amplitude=amplitude,\n", " ),\n", " phase=np.pi / 2,\n", " )\n", " return gate\n", "\n", - " return ry_gate\n" + " return ry_gate" ] }, { @@ -241,7 +234,7 @@ " )\n", " return gate\n", "\n", - " return rz_gate\n" + " return rz_gate" ] }, { @@ -265,12 +258,12 @@ " \"\"\"\n", " measure_pulse = pulse_library.gaussian_square(\n", " uid=f\"{qubit.uid}_readout_pulse\",\n", - " length=qubit.parameters.user_defs[\"readout_len\"],\n", - " amplitude=qubit.parameters.user_defs[\"readout_amp\"],\n", + " length=qubit.parameters.user_defined[\"readout_len\"],\n", + " amplitude=qubit.parameters.user_defined[\"readout_amp\"],\n", " )\n", " integration_kernel = pulse_library.const(\n", " uid=f\"{qubit.uid}_integration_kernel\",\n", - " length=qubit.parameters.user_defs[\"readout_len\"],\n", + " length=qubit.parameters.user_defined[\"readout_len\"],\n", " )\n", "\n", " gate = Section(uid=id_generator(f\"meas_{qubit.uid}_{handle}\"))\n", @@ -283,7 +276,7 
@@ " )\n", " return gate\n", "\n", - " return measurement_gate\n" + " return measurement_gate" ] }, { @@ -323,7 +316,7 @@ "\n", " # First cross-resonance component\n", " cancellation_p = Section(\n", - " uid=id_generator(f\"{cx_id}_canc_p\"), play_after=x180_both.uid\n", + " uid=id_generator(f\"{cx_id}_canc_p\"), play_after=x180_both\n", " )\n", " cancellation_p.play(signal=target.signals[\"drive\"], pulse=cancellation_target_p)\n", " cancellation_p.play(\n", @@ -333,14 +326,14 @@ "\n", " # play X pulse on control\n", " x180_control = Section(\n", - " uid=id_generator(f\"{cx_id}_x_q0\"), play_after=cancellation_p.uid\n", + " uid=id_generator(f\"{cx_id}_x_q0\"), play_after=cancellation_p\n", " )\n", " x180_control.play(signal=control.signals[\"drive\"], pulse=x180_pulse_control)\n", " gate.add(x180_control)\n", "\n", " # Second cross-resonance component\n", " cancellation_n = Section(\n", - " uid=id_generator(f\"cx_{cx_id}_canc_n\"), play_after=x180_control.uid\n", + " uid=id_generator(f\"cx_{cx_id}_canc_n\"), play_after=x180_control\n", " )\n", " cancellation_n.play(signal=target.signals[\"drive\"], pulse=cancellation_target_n)\n", " cancellation_n.play(\n", @@ -350,7 +343,7 @@ "\n", " return gate\n", "\n", - " return cx_gate\n" + " return cx_gate" ] }, { @@ -368,6 +361,7 @@ " The reset gate function takes no arguments and returns a LabOne Q section that performs\n", " the reset.\n", " \"\"\"\n", + "\n", " def reset_gate():\n", " sig = qubit.signals\n", " # Reset Section\n", @@ -375,12 +369,14 @@ " # qubit state readout\n", " readout = measurement(qubit)(f\"{qubit.uid}_qubit_state\")\n", " # delay after measurement\n", - " readout.delay(signal=sig[\"acquire\"], time=qubit.parameters.user_defs[\"reset_length\"])\n", + " readout.delay(\n", + " signal=sig[\"acquire\"], time=qubit.parameters.user_defined[\"reset_length\"]\n", + " )\n", " # real-time feedback, fetching the measurement data identified by handle locally from the QA unit of the SHFQC\n", " match_case = 
Match(\n", " uid=f\"{qubit.uid}_feedback\",\n", " handle=f\"{qubit.uid}_qubit_state\",\n", - " play_after=readout.uid,\n", + " play_after=readout,\n", " )\n", " # measurement result 0 - ground state\n", " case_0 = Case(uid=f\"{qubit.uid}_0_Case\", state=0)\n", @@ -396,7 +392,7 @@ " reset.add(match_case)\n", " return reset\n", "\n", - " return reset_gate\n" + " return reset_gate" ] }, { @@ -426,11 +422,13 @@ " gate_store.register_gate_section(\"ry\", (oq3_qubit,), ry(l1q_qubit))\n", " gate_store.register_gate_section(\"rz\", (oq3_qubit,), rz(l1q_qubit))\n", " gate_store.register_gate_section(\"measure\", (oq3_qubit,), measurement(l1q_qubit))\n", - " gate_store.register_gate_section(\"reset\", (oq3_qubit,), reset(l1q_qubit, drive_pulse(l1q_qubit, \"reset\")))\n", + " gate_store.register_gate_section(\n", + " \"reset\", (oq3_qubit,), reset(l1q_qubit, drive_pulse(l1q_qubit, \"reset\"))\n", + " )\n", "\n", "# Two qubit gates:\n", "\n", - "gate_store.register_gate_section(\"cx\", (\"_all_qubits[0]\", \"_all_qubits[1]\"), cx(q0, q1))\n" + "gate_store.register_gate_section(\"cx\", (\"_all_qubits[0]\", \"_all_qubits[1]\"), cx(q0, q1))" ] }, { @@ -461,7 +459,7 @@ "outputs": [], "source": [ "ansatz = TwoLocal(2, [\"ry\", \"rz\"], \"cx\", \"full\", reps=3, insert_barriers=True)\n", - "ansatz.decompose().draw()\n" + "ansatz.decompose().draw()" ] }, { @@ -497,7 +495,7 @@ "circuit.measure(0, 0)\n", "circuit.measure(1, 1)\n", "\n", - "circuit.decompose().draw()\n" + "circuit.decompose().draw()" ] }, { @@ -519,7 +517,7 @@ "outputs": [], "source": [ "program = q3.dumps(circuit.decompose())\n", - "print(program)\n" + "print(program)" ] }, { @@ -535,7 +533,7 @@ "\n", "my_session = Session(device_setup=device_setup)\n", "my_session.connect(do_emulation=True)\n", - "compiled_exp = my_session.compile(exp)\n" + "compiled_exp = my_session.compile(exp)" ] }, { @@ -554,7 +552,7 @@ "metadata": {}, "outputs": [], "source": [ - "plot_simulation(compiled_exp, plot_width=10, plot_height=3)\n" + 
"plot_simulation(compiled_exp, plot_width=10, plot_height=3)" ] }, { @@ -593,7 +591,7 @@ "metadata": {}, "outputs": [], "source": [ - "my_results = my_session.run(compiled_exp)\n" + "my_results = my_session.run(compiled_exp)" ] }, { @@ -603,7 +601,7 @@ "metadata": {}, "outputs": [], "source": [ - "my_results.acquired_results\n" + "my_results.acquired_results" ] }, { @@ -617,9 +615,9 @@ ], "metadata": { "kernelspec": { - "display_name": "develop", + "display_name": "Python 3.10.5 64-bit ('qccs310')", "language": "python", - "name": "develop" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -631,7 +629,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.6" + "version": "3.11.4" + }, + "vscode": { + "interpreter": { + "hash": "1c85a3b4c5c056083d107184758f94c1c41fe3d42fcac7760a486edd2517f152" + } } }, "nbformat": 4, diff --git a/examples/06_qasm/02_Two_Qubit_RB_Qiskit.ipynb b/examples/06_qasm/02_Two_Qubit_RB_Qiskit.ipynb new file mode 100644 index 0000000..d6194ba --- /dev/null +++ b/examples/06_qasm/02_Two_Qubit_RB_Qiskit.ipynb @@ -0,0 +1,649 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Two Qubit Randomized Benchmarking in LabOne Q with Qiskit" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this notebook, you'll use the [Qiskit Experiment Library](https://qiskit.org/ecosystem/experiments/apidocs/library.html) to generate a two qubit randomized benchmarking experiment. You'll then export the generated experiment to [OpenQASM](https://openqasm.com/), import your OpenQASM experiment into LabOne Q, compile, and simulate the output signals." 
+ ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Python Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# convenience import of all LabOne Q functionality\n", + "from laboneq.simple import *\n", + "\n", + "# plotting and fitting functionality\n", + "from laboneq.contrib.example_helpers.plotting.plot_helpers import *\n", + "from laboneq.contrib.example_helpers.data_analysis.data_analysis import *\n", + "from laboneq.pulse_sheet_viewer.interactive_psv import interactive_psv\n", + "\n", + "# device setup and descriptor\n", + "from laboneq.dsl.utils import calibrate_devices\n", + "from laboneq.dsl.experiment.utils import id_generator\n", + "from laboneq.contrib.example_helpers.generate_descriptor import generate_descriptor\n", + "\n", + "# open qasm importer\n", + "from laboneq.openqasm3.gate_store import GateStore\n", + "\n", + "# qiskit\n", + "from qiskit import qasm3, transpile\n", + "from qiskit_experiments.library import randomized_benchmarking\n", + "\n", + "# additional imports\n", + "from math import pi" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Set up Qiskit-generated RB" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll start by creating a Standard RB experiment from the Qiskit Experiment Library [here](https://qiskit.org/ecosystem/experiments/stubs/qiskit_experiments.library.randomized_benchmarking.StandardRB.html#qiskit_experiments.library.randomized_benchmarking.StandardRB). For two qubits for a few different clifford lengths.\n", + "\n", + "Note that most circuits that can be generated in Qiskit and converted to OpenQASM could be adapted to be run in a similar way in LabOne Q! 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Use Qiskit Experiment Library to Generate RB\n", + "qiskit_experiment = randomized_benchmarking.StandardRB(\n", + " physical_qubits=[0, 1], lengths=[4, 8, 12]\n", + ").circuits()\n", + "\n", + "qiskit_experiment[2].draw()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can then use the Qiskit `transpile` function to obtain your favorite set of basis gates." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Choose basis gates\n", + "transpiled_circuit = transpile(\n", + " qiskit_experiment, basis_gates=[\"id\", \"sx\", \"x\", \"rz\", \"cx\"]\n", + ")\n", + "\n", + "transpiled_circuit[0].draw()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "program_list = []\n", + "for circuit in transpiled_circuit:\n", + " program_list.append(qasm3.dumps(circuit))\n", + "print(program_list[0])" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# LabOne Q Experiment" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup, Calibration & Configuration" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll define your device setup and calibration below, as well as a function to generate a LabOne Q experiment using the built-in `OpenQasm3Importer`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "generate_descriptor(\n", + " pqsc=[\"DEV10056\"],\n", + " shfqc_6=[\"DEV12108\"],\n", + " hdawg_8=[\"DEV8138\"],\n", + " number_data_qubits=3,\n", + " number_flux_lines=3,\n", + " multiplex=True,\n", + " number_multiplex=3,\n", + " save=True,\n", + " filename=\"SeaCucumber_SHF_HD_PQSC\",\n", + ")\n", + "\n", + "device_setup = DeviceSetup.from_yaml(\n", + " filepath=\"./Descriptors/SeaCucumber_SHF_HD_PQSC.yaml\",\n", + " server_host=\"ip_address\",\n", + " server_port=\"8004\",\n", + " setup_name=\"my_setup_name\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "q0 = Transmon.from_logical_signal_group(\n", + " \"q0\",\n", + " lsg=device_setup.logical_signal_groups[\"q0\"],\n", + " parameters=TransmonParameters(\n", + " resonance_frequency_ge=6.15e9,\n", + " resonance_frequency_ef=5.85e9,\n", + " drive_lo_frequency=6.1e9,\n", + " readout_resonator_frequency=6.4e9,\n", + " readout_lo_frequency=6.3e9,\n", + " user_defined={\n", + " \"cross_resonance_frequency\": 200e6,\n", + " \"amplitude_pi\": 0.5,\n", + " \"pulse_length\": 50e-9,\n", + " \"readout_len\": 5e-7,\n", + " \"readout_amp\": 0.2,\n", + " \"reset_length\": 200e-9,\n", + " },\n", + " ),\n", + ")\n", + "\n", + "q1 = Transmon.from_logical_signal_group(\n", + " \"q1\",\n", + " lsg=device_setup.logical_signal_groups[\"q1\"],\n", + " parameters=TransmonParameters(\n", + " resonance_frequency_ge=6.25e9,\n", + " resonance_frequency_ef=5.95e9,\n", + " drive_lo_frequency=6.1e9,\n", + " readout_resonator_frequency=6.4e9,\n", + " readout_lo_frequency=6.3e9,\n", + " user_defined={\n", + " \"cross_resonance_frequency\": -200e6,\n", + " \"amplitude_pi\": 0.6,\n", + " \"pulse_length\": 50e-9,\n", + " \"readout_len\": 5e-7,\n", + " \"readout_amp\": 0.2,\n", + " \"reset_length\": 200e-9,\n", + " },\n", + " ),\n", + ")\n", + "\n", + 
"qubits = [q0, q1]\n", + "for qubit in qubits:\n", + " device_setup.set_calibration(qubit.calibration())\n", + " # set calibration of cross resonance signal lines - not currently included in TransmonQubit calibration method\n", + " device_setup.logical_signal_groups[qubit.uid].logical_signals[\n", + " \"drive_line_cr\"\n", + " ].calibration = SignalCalibration(\n", + " oscillator=Oscillator(\n", + " frequency=qubit.parameters.user_defined[\"cross_resonance_frequency\"],\n", + " modulation_type=ModulationType.HARDWARE,\n", + " )\n", + " )" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Transpilation Support (Gate Definitions)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll now define functions to generate pulses and gates from the OpenQASM program text." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def drive_pulse(qubit: Qubit, label, length=50e-9, amplitude=0.6):\n", + " \"\"\"Return a drive pulse for the given qubit.\n", + "\n", + " In practice different drive pulses would be specified for each qubit and operation.\n", + " \"\"\"\n", + " return pulse_library.drag(\n", + " uid=f\"{qubit.uid}_{label}\",\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " amplitude=qubit.parameters.user_defined[\"amplitude_pi\"],\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def drive_pulse_root(qubit: Qubit, label, length=50e-9, amplitude=0.6):\n", + " \"\"\"Return a root drive pulse for the given qubit.\n", + "\n", + " In practice different drive pulses would be specified for each qubit and operation.\n", + " \"\"\"\n", + " return pulse_library.drag(\n", + " uid=f\"{qubit.uid}_{label}\",\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " 
amplitude=(qubit.parameters.user_defined[\"amplitude_pi\"]) / 2,\n", + " )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def rz(qubit: Qubit):\n", + " \"\"\"Return a parameterized Rz gate for the specified qubit.\n", + "\n", + " The gate is a function that takes the angle to rotate and\n", + " returns a LabOne Q section that performs the rotation.\n", + " \"\"\"\n", + "\n", + " def rz_gate(angle: float):\n", + " \"\"\"Rz(theta).\n", + "\n", + " Theta is in radians - implements a virtual z-gate\n", + " \"\"\"\n", + " gate = Section(uid=id_generator(f\"p_{qubit.uid}_rz_{int(180 * angle / pi)}\"))\n", + " gate.play(\n", + " signal=qubit.signals[\"drive\"],\n", + " pulse=None,\n", + " increment_oscillator_phase=angle,\n", + " )\n", + " return gate\n", + "\n", + " return rz_gate" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def measurement(qubit: Qubit):\n", + " \"\"\"Return a measurement operation of the specified qubit.\n", + "\n", + " The operation is a function that takes the measurement handle (a string)\n", + " and returns a LabOne Q section that performs the measurement.\n", + " \"\"\"\n", + "\n", + " def measurement_gate(handle: str):\n", + " \"\"\"Perform a measurement.\n", + "\n", + " Handle is the name of where to store the measurement result. E.g. 
\"meas[0]\".\n", + " \"\"\"\n", + " measure_pulse = pulse_library.gaussian_square(\n", + " uid=f\"{qubit.uid}_readout_pulse\",\n", + " length=qubit.parameters.user_defined[\"readout_len\"],\n", + " amplitude=qubit.parameters.user_defined[\"readout_amp\"],\n", + " )\n", + " integration_kernel = pulse_library.const(\n", + " uid=f\"{qubit.uid}_integration_kernel\",\n", + " length=qubit.parameters.user_defined[\"readout_len\"],\n", + " )\n", + "\n", + " gate = Section(uid=id_generator(f\"meas_{qubit.uid}_{handle}\"))\n", + " gate.reserve(signal=qubit.signals[\"drive\"])\n", + " gate.play(signal=qubit.signals[\"measure\"], pulse=measure_pulse)\n", + " gate.acquire(\n", + " signal=qubit.signals[\"acquire\"],\n", + " handle=handle,\n", + " kernel=integration_kernel,\n", + " )\n", + " return gate\n", + "\n", + " return measurement_gate" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def cx(control: Qubit, target: Qubit):\n", + " \"\"\"Return a controlled X gate for the specified control and target qubits.\n", + "\n", + " The CX gate function takes no arguments and returns a LabOne Q section that performs\n", + " the controllex X gate.\n", + " \"\"\"\n", + "\n", + " def cx_gate():\n", + " cx_id = f\"cx_{control.uid}_{target.uid}\"\n", + "\n", + " gate = Section(uid=id_generator(cx_id))\n", + "\n", + " # define X pulses for target and control\n", + " x180_pulse_control = drive_pulse(control, label=\"x180\")\n", + " x180_pulse_target = drive_pulse(target, label=\"x180\")\n", + "\n", + " # define cancellation pulses for target and control\n", + " cancellation_control_n = pulse_library.gaussian_square(uid=\"CR-\")\n", + " cancellation_control_p = pulse_library.gaussian_square(uid=\"CR+\")\n", + " cancellation_target_p = pulse_library.gaussian_square(uid=\"q1+\")\n", + " cancellation_target_n = pulse_library.gaussian_square(uid=\"q1-\")\n", + "\n", + " # play X pulses on both target and control\n", + " x180_both 
= Section(uid=id_generator(f\"{cx_id}_x_both\"))\n", + " x180_both.play(signal=control.signals[\"drive\"], pulse=x180_pulse_control)\n", + " x180_both.play(signal=target.signals[\"drive\"], pulse=x180_pulse_target)\n", + " gate.add(x180_both)\n", + "\n", + " # First cross-resonance component\n", + " cancellation_p = Section(\n", + " uid=id_generator(f\"{cx_id}_canc_p\"), play_after=x180_both.uid\n", + " )\n", + " cancellation_p.play(signal=target.signals[\"drive\"], pulse=cancellation_target_p)\n", + " cancellation_p.play(\n", + " signal=control.signals[\"flux\"], pulse=cancellation_control_n\n", + " )\n", + " gate.add(cancellation_p)\n", + "\n", + " # play X pulse on control\n", + " x180_control = Section(\n", + " uid=id_generator(f\"{cx_id}_x_q0\"), play_after=cancellation_p.uid\n", + " )\n", + " x180_control.play(signal=control.signals[\"drive\"], pulse=x180_pulse_control)\n", + " gate.add(x180_control)\n", + "\n", + " # Second cross-resonance component\n", + " cancellation_n = Section(\n", + " uid=id_generator(f\"cx_{cx_id}_canc_n\"), play_after=x180_control.uid\n", + " )\n", + " cancellation_n.play(signal=target.signals[\"drive\"], pulse=cancellation_target_n)\n", + " cancellation_n.play(\n", + " signal=control.signals[\"flux\"], pulse=cancellation_control_p\n", + " )\n", + " gate.add(cancellation_n)\n", + "\n", + " return gate\n", + "\n", + " return cx_gate" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Two Qubit RB" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You're almost ready to run your experiment!" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Connect to Session" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll need to start a LabOne Q session. Here, you'll run the session in emulation mode. 
If you've modified the descriptor to run on your own devices above, you could connect to them here instead." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "my_session = Session(device_setup=device_setup)\n", + "my_session.connect(do_emulation=True)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define Gates, Load QASM 3 Program, and Go!" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, you'll map your OpenQASM gates to signals produced on the instruments using `register_gate` and `register_gate_section` functions. \n", + "\n", + "Once you've done that, you can compile your experiment and plot the output using the LabOne Q simulator." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gate_store = GateStore()\n", + "qubit_map = {\"_all_qubits[0]\": q0, \"_all_qubits[1]\": q1}\n", + "\n", + "# Single qubit gates:\n", + "\n", + "for oq3_qubit, l1q_qubit in qubit_map.items():\n", + " gate_store.register_gate(\n", + " \"sx\",\n", + " oq3_qubit,\n", + " drive_pulse_root(l1q_qubit, label=\"sx\"),\n", + " signal=l1q_qubit.signals[\"drive\"],\n", + " )\n", + " gate_store.register_gate(\n", + " \"x\",\n", + " oq3_qubit,\n", + " drive_pulse(l1q_qubit, label=\"x\"),\n", + " signal=l1q_qubit.signals[\"drive\"],\n", + " )\n", + " gate_store.register_gate_section(\"rz\", (oq3_qubit,), rz(l1q_qubit))\n", + " gate_store.register_gate_section(\"measure\", (oq3_qubit,), measurement(l1q_qubit))\n", + "\n", + "# Two qubit gates:\n", + "gate_store.register_gate_section(\"cx\", (\"_all_qubits[0]\", \"_all_qubits[1]\"), cx(q0, q1))\n", + "gate_store.register_gate_section(\"cx\", (\"_all_qubits[1]\", \"_all_qubits[0]\"), cx(q1, q0))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exp = 
exp_from_qasm(program_list[0], qubits=qubit_map, gate_store=gate_store)\n", + "compiled_exp = my_session.compile(exp)\n", + "\n", + "plot_simulation(compiled_exp, length=100e-6)\n", + "\n", + "my_results = my_session.run(compiled_exp)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Draw the circuit from above" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can also draw the circuit corresponding to the simulated signals you just produced!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "transpiled_circuit[0].draw()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Compile and draw more circuits in the list" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can do this for any circuit you've generated in the list." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exp_1 = exp_from_qasm(program_list[1], qubits=qubit_map, gate_store=gate_store)\n", + "compiled_exp_1 = my_session.compile(exp_1)\n", + "\n", + "plot_simulation(compiled_exp_1, length=100e-6)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "transpiled_circuit[1].draw()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exp_2 = exp_from_qasm(program_list[2], qubits=qubit_map, gate_store=gate_store)\n", + "compiled_exp_2 = my_session.compile(exp_2)\n", + "\n", + "plot_simulation(compiled_exp_2, length=100e-6)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "plot_simulation(compiled_exp_2, length=1000e-6)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": 
{}, + "outputs": [], + "source": [ + "transpiled_circuit[2].draw()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "ZI_LabOneQ_2p7_Public", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/examples/06_qasm/03_Two_Qubit_RB_pyGSTi_OpenQASM.ipynb b/examples/06_qasm/03_Two_Qubit_RB_pyGSTi_OpenQASM.ipynb new file mode 100644 index 0000000..ad546a7 --- /dev/null +++ b/examples/06_qasm/03_Two_Qubit_RB_pyGSTi_OpenQASM.ipynb @@ -0,0 +1,701 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Two Qubit Randomized Benchmarking in LabOne Q with pyGSTi" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this notebook, you'll use the [pyGSTi](https://github.com/pyGSTio/pyGSTi) package to generate a two qubit randomized benchmarking experiment. You'll then export the generated experiment to [OpenQASM](https://openqasm.com/), import your OpenQASM experiment into LabOne Q, compile, and simulate the output signals." 
+ ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Python Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# convenience import of all LabOne Q functionality\n", + "from laboneq.simple import *\n", + "\n", + "# plotting and fitting functionality\n", + "from laboneq.contrib.example_helpers.plotting.plot_helpers import *\n", + "from laboneq.contrib.example_helpers.data_analysis.data_analysis import *\n", + "\n", + "# device setup and descriptor\n", + "from laboneq.dsl.utils import calibrate_devices\n", + "from laboneq.dsl.experiment.utils import id_generator\n", + "from laboneq.contrib.example_helpers.generate_descriptor import generate_descriptor\n", + "\n", + "# LabOne Q OpenQASM Tools\n", + "from laboneq.openqasm3.gate_store import GateStore\n", + "\n", + "# qiskit\n", + "from qiskit import qasm3, transpile\n", + "\n", + "# pyGSTi\n", + "import pygsti\n", + "from pygsti.processors import QubitProcessorSpec as QPS\n", + "from pygsti.processors import CliffordCompilationRules as CCR\n", + "\n", + "# additional imports\n", + "from math import pi" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# pyGSTi Experiment Generation" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll start by creating a Clifford RB experiment in a similar fashion as done in the pyGSTi Tutorial [here](https://github.com/pyGSTio/pyGSTi/blob/master/jupyter_notebooks/Tutorials/algorithms/RB-CliffordRB.ipynb). \n", + "\n", + "Note that mosst circuits that can be generated in pyGSTi and converted to OpenQASM could be adapted to be run in a similar way! 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Define pyGSTi 2 Qubit RB circuit\n", + "\n", + "n_qubits = 2\n", + "qubit_labels = [\"Q0\", \"Q1\"]\n", + "gate_names = [\"Gxpi2\", \"Gxmpi2\", \"Gypi2\", \"Gympi2\", \"Gcphase\"]\n", + "availability = {\"Gcphase\": [(\"Q0\", \"Q1\")]}\n", + "\n", + "# Uncomment below for more qubits or to use a different set of basis gates\n", + "# n_qubits = 4\n", + "# qubit_labels = ['Q0','Q1','Q2','Q3']\n", + "# gate_names = ['Gxpi2', 'Gxmpi2', 'Gypi2', 'Gympi2', 'Gcnot']\n", + "# availability = {'Gcphase':[('Q0','Q1'), ('Q1','Q2'), ('Q2','Q3'), ('Q3','Q0')]}\n", + "\n", + "pspec = QPS(n_qubits, gate_names, availability=availability, qubit_labels=qubit_labels)\n", + "\n", + "compilations = {\n", + " \"absolute\": CCR.create_standard(\n", + " pspec, \"absolute\", (\"paulis\", \"1Qcliffords\"), verbosity=0\n", + " ),\n", + " \"paulieq\": CCR.create_standard(\n", + " pspec, \"paulieq\", (\"1Qcliffords\", \"allcnots\"), verbosity=0\n", + " ),\n", + "}\n", + "\n", + "depths = [20, 50]\n", + "circuits_per_depth = 2\n", + "\n", + "qubits = [\"Q0\", \"Q1\"]\n", + "\n", + "randomizeout = True\n", + "citerations = 20" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll then compile the circuits for your Clifford RB experiment and print them." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "design = pygsti.protocols.CliffordRBDesign(\n", + " pspec,\n", + " compilations,\n", + " depths,\n", + " circuits_per_depth,\n", + " qubit_labels=qubits,\n", + " randomizeout=randomizeout,\n", + " citerations=citerations,\n", + ")\n", + "\n", + "circuits_rb = design.all_circuits_needing_data\n", + "\n", + "for circuit in circuits_rb:\n", + " print(circuit)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## pyGSTi Output to QASM 3 Sanitization" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, you'll define a function to output the above circuit into OpenQASM. \n", + "\n", + "Note: while pyGSTi can output directly into OpenQASM 2, Labone Q imports OpenQASM 3, so we take care of that in this function as well." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def sanitize_pygsti_output(\n", + " circuit=circuits_rb,\n", + " pygsti_standard_gates=\"x-sx-rz\",\n", + " qasm_basis_gates=[\"id\", \"sx\", \"x\", \"rz\", \"cx\"],\n", + " # qasm_basis_gates=[\"rx\",\"ry\",\"rz\",\"cz\"],\n", + "):\n", + " qasm2_circuit = []\n", + "\n", + " for circuit in circuits_rb:\n", + " # pyGSTi standard gates are \"u3\" and \"x-sx-rz\"\"\n", + " qasm2_circuit.append(\n", + " circuit.convert_to_openqasm(standard_gates_version=pygsti_standard_gates)\n", + " .replace(\"OPENQASM 2.0;\", \"OPENQASM 3.0;\")\n", + " .replace('include \"qelib1.inc\";', 'include \"stdgates.inc\";')\n", + " )\n", + "\n", + " qasm3_circuit = []\n", + "\n", + " for entry in qasm2_circuit:\n", + " qasm3_circuit.append(\n", + " qasm3.Exporter().dumps(\n", + " transpile(\n", + " qasm3.loads(entry),\n", + " basis_gates=qasm_basis_gates,\n", + " )\n", + " )\n", + " )\n", + "\n", + " return qasm3_circuit" + ] + }, + { + "attachments": {}, + 
"cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll now output your OpenQASM 3 circuits as a list and print the first one." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "program_list = sanitize_pygsti_output(\n", + " qasm_basis_gates=[\"id\", \"sx\", \"x\", \"rz\", \"cx\"],\n", + ")\n", + "\n", + "# Prtint the first circuit in the list\n", + "print(program_list[0])" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# LabOne Q Experiment" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Setup, Calibration & Configuration" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll define your device setup and calibration below using Qubits to calibrate your devices." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "generate_descriptor(\n", + " pqsc=[\"DEV10056\"],\n", + " shfqc_6=[\"DEV12108\"],\n", + " hdawg_8=[\"DEV8138\"],\n", + " number_data_qubits=3,\n", + " number_flux_lines=3,\n", + " multiplex=True,\n", + " number_multiplex=3,\n", + " save=True,\n", + " filename=\"SeaCucumber_SHF_HD_PQSC\",\n", + ")\n", + "\n", + "device_setup = DeviceSetup.from_yaml(\n", + " filepath=\"./Descriptors/SeaCucumber_SHF_HD_PQSC.yaml\",\n", + " server_host=\"ip_address\",\n", + " server_port=\"8004\",\n", + " setup_name=\"my_setup_name\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "q0 = Transmon.from_logical_signal_group(\n", + " \"q0\",\n", + " lsg=device_setup.logical_signal_groups[\"q0\"],\n", + " parameters=TransmonParameters(\n", + " resonance_frequency_ge=6.15e9,\n", + " resonance_frequency_ef=5.85e9,\n", + " drive_lo_frequency=6.1e9,\n", + " readout_resonator_frequency=6.4e9,\n", + " 
readout_lo_frequency=6.3e9,\n", + " user_defined={\n", + " \"cross_resonance_frequency\": 200e6,\n", + " \"amplitude_pi\": 0.5,\n", + " \"pulse_length\": 50e-9,\n", + " \"readout_len\": 5e-7,\n", + " \"readout_amp\": 0.2,\n", + " \"reset_length\": 200e-9,\n", + " },\n", + " ),\n", + ")\n", + "\n", + "q1 = Transmon.from_logical_signal_group(\n", + " \"q1\",\n", + " lsg=device_setup.logical_signal_groups[\"q1\"],\n", + " parameters=TransmonParameters(\n", + " resonance_frequency_ge=6.25e9,\n", + " resonance_frequency_ef=5.95e9,\n", + " drive_lo_frequency=6.1e9,\n", + " readout_resonator_frequency=6.4e9,\n", + " readout_lo_frequency=6.3e9,\n", + " user_defined={\n", + " \"cross_resonance_frequency\": -200e6,\n", + " \"amplitude_pi\": 0.6,\n", + " \"pulse_length\": 50e-9,\n", + " \"readout_len\": 5e-7,\n", + " \"readout_amp\": 0.2,\n", + " \"reset_length\": 200e-9,\n", + " },\n", + " ),\n", + ")\n", + "\n", + "qubits = [q0, q1]\n", + "for qubit in qubits:\n", + " device_setup.set_calibration(qubit.calibration())\n", + " # set calibration of cross resonance signal lines - not currently included in TransmonQubit calibration method\n", + " device_setup.logical_signal_groups[qubit.uid].logical_signals[\n", + " \"drive_line_cr\"\n", + " ].calibration = SignalCalibration(\n", + " oscillator=Oscillator(\n", + " frequency=qubit.parameters.user_defined[\"cross_resonance_frequency\"],\n", + " modulation_type=ModulationType.HARDWARE,\n", + " )\n", + " )" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Transpilation Support (Gate Definitions)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll now define functions to generate pulses and gates from the OpenQASM program text." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "def drive_pulse(qubit: Qubit, label, length=50e-9, amplitude=0.6):\n", + " \"\"\"Return a drive pulse for the given qubit.\n", + "\n", + " In practice different drive pulses would be specified for each qubit and operation.\n", + " \"\"\"\n", + " return pulse_library.drag(\n", + " uid=f\"{qubit.uid}_{label}\",\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " amplitude=qubit.parameters.user_defined[\"amplitude_pi\"],\n", + " )\n", + "\n", + "\n", + "def drive_pulse_root(qubit: Qubit, label, length=50e-9, amplitude=0.6):\n", + " \"\"\"Return a root drive pulse for the given qubit.\n", + "\n", + " In practice different drive pulses would be specified for each qubit and operation.\n", + " \"\"\"\n", + " return pulse_library.drag(\n", + " uid=f\"{qubit.uid}_{label}\",\n", + " length=qubit.parameters.user_defined[\"pulse_length\"],\n", + " amplitude=(qubit.parameters.user_defined[\"amplitude_pi\"]) / 2,\n", + " )\n", + "\n", + "\n", + "def rz(qubit: Qubit):\n", + " \"\"\"Return a parameterized Rz gate for the specified qubit.\n", + "\n", + " The gate is a function that takes the angle to rotate and\n", + " returns a LabOne Q section that performs the rotation.\n", + " \"\"\"\n", + "\n", + " def rz_gate(angle: float):\n", + " \"\"\"Rz(theta).\n", + "\n", + " Theta is in radians - implements a virtual z-gate\n", + " \"\"\"\n", + " gate = Section(uid=id_generator(f\"p_{qubit.uid}_rz_{int(180 * angle / pi)}\"))\n", + " gate.play(\n", + " signal=qubit.signals[\"drive\"],\n", + " pulse=None,\n", + " increment_oscillator_phase=angle,\n", + " )\n", + " return gate\n", + "\n", + " return rz_gate\n", + "\n", + "\n", + "def measurement(qubit: Qubit):\n", + " \"\"\"Return a measurement operation of the specified qubit.\n", + "\n", + " The operation is a function that takes the measurement handle (a string)\n", + " and returns a LabOne Q section 
that performs the measurement.\n", + " \"\"\"\n", + "\n", + " def measurement_gate(handle: str):\n", + " \"\"\"Perform a measurement.\n", + "\n", + " Handle is the name of where to store the measurement result. E.g. \"meas[0]\".\n", + " \"\"\"\n", + " measure_pulse = pulse_library.gaussian_square(\n", + " uid=f\"{qubit.uid}_readout_pulse\",\n", + " length=qubit.parameters.user_defined[\"readout_len\"],\n", + " amplitude=qubit.parameters.user_defined[\"readout_amp\"],\n", + " )\n", + " integration_kernel = pulse_library.const(\n", + " uid=f\"{qubit.uid}_integration_kernel\",\n", + " length=qubit.parameters.user_defined[\"readout_len\"],\n", + " )\n", + "\n", + " gate = Section(uid=id_generator(f\"meas_{qubit.uid}_{handle}\"))\n", + " gate.reserve(signal=qubit.signals[\"drive\"])\n", + " gate.play(signal=qubit.signals[\"measure\"], pulse=measure_pulse)\n", + " gate.acquire(\n", + " signal=qubit.signals[\"acquire\"],\n", + " handle=handle,\n", + " kernel=integration_kernel,\n", + " )\n", + " return gate\n", + "\n", + " return measurement_gate\n", + "\n", + "\n", + "def cx(control: Qubit, target: Qubit):\n", + " \"\"\"Return a controlled X gate for the specified control and target qubits.\n", + "\n", + " The CX gate function takes no arguments and returns a LabOne Q section that performs\n", + " the controlled X gate.\n", + " \"\"\"\n", + "\n", + " def cx_gate():\n", + " cx_id = f\"cx_{control.uid}_{target.uid}\"\n", + "\n", + " gate = Section(uid=id_generator(cx_id))\n", + "\n", + " # define X pulses for target and control\n", + " x180_pulse_control = drive_pulse(control, label=\"x180\")\n", + " x180_pulse_target = drive_pulse(target, label=\"x180\")\n", + "\n", + " # define cancellation pulses for target and control\n", + " cancellation_control_n = pulse_library.gaussian_square(uid=\"CR-\")\n", + " cancellation_control_p = pulse_library.gaussian_square(uid=\"CR+\")\n", + " cancellation_target_p = pulse_library.gaussian_square(uid=\"q1+\")\n", + " 
cancellation_target_n = pulse_library.gaussian_square(uid=\"q1-\")\n", + "\n", + " # play X pulses on both target and control\n", + " x180_both = Section(uid=id_generator(f\"{cx_id}_x_both\"))\n", + " x180_both.play(signal=control.signals[\"drive\"], pulse=x180_pulse_control)\n", + " x180_both.play(signal=target.signals[\"drive\"], pulse=x180_pulse_target)\n", + " gate.add(x180_both)\n", + "\n", + " # First cross-resonance component\n", + " cancellation_p = Section(\n", + " uid=id_generator(f\"{cx_id}_canc_p\"), play_after=x180_both.uid\n", + " )\n", + " cancellation_p.play(signal=target.signals[\"drive\"], pulse=cancellation_target_p)\n", + " cancellation_p.play(\n", + " signal=control.signals[\"flux\"], pulse=cancellation_control_n\n", + " )\n", + " gate.add(cancellation_p)\n", + "\n", + " # play X pulse on control\n", + " x180_control = Section(\n", + " uid=id_generator(f\"{cx_id}_x_q0\"), play_after=cancellation_p.uid\n", + " )\n", + " x180_control.play(signal=control.signals[\"drive\"], pulse=x180_pulse_control)\n", + " gate.add(x180_control)\n", + "\n", + " # Second cross-resonance component\n", + " cancellation_n = Section(\n", + " uid=id_generator(f\"cx_{cx_id}_canc_n\"), play_after=x180_control.uid\n", + " )\n", + " cancellation_n.play(signal=target.signals[\"drive\"], pulse=cancellation_target_n)\n", + " cancellation_n.play(\n", + " signal=control.signals[\"flux\"], pulse=cancellation_control_p\n", + " )\n", + " gate.add(cancellation_n)\n", + "\n", + " return gate\n", + "\n", + " return cx_gate" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Two Qubit RB" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You're almost ready to run your experiment!" 
+ ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Connect to Session" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You'll need to start a LabOne Q session. Here, you'll run the session in emulation mode. If you've modified the descriptor to run on your own devices above, you could connect to them here instead." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "my_session = Session(device_setup=device_setup)\n", + "my_session.connect(do_emulation=True)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Define Gates, Load QASM 3 Program, and Go!" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, you'll map your OpenQASM gates to signals produced on the instruments using `register_gate` and `register_gate_section` functions. \n", + "\n", + "Once you've done that, you can compile your experiment and plot the output using the LabOne Q simulator." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gate_store = GateStore()\n", + "qubit_map = {\"_all_qubits[0]\": q0, \"_all_qubits[1]\": q1}\n", + "\n", + "# Single qubit gates:\n", + "\n", + "for oq3_qubit, l1q_qubit in qubit_map.items():\n", + " gate_store.register_gate(\n", + " \"sx\",\n", + " oq3_qubit,\n", + " drive_pulse_root(l1q_qubit, label=\"sx\"),\n", + " signal=l1q_qubit.signals[\"drive\"],\n", + " )\n", + " gate_store.register_gate(\n", + " \"x\",\n", + " oq3_qubit,\n", + " drive_pulse(l1q_qubit, label=\"x\"),\n", + " signal=l1q_qubit.signals[\"drive\"],\n", + " )\n", + " gate_store.register_gate_section(\"rz\", (oq3_qubit,), rz(l1q_qubit))\n", + " gate_store.register_gate_section(\"measure\", (oq3_qubit,), measurement(l1q_qubit))\n", + "\n", + "# Two qubit gates:\n", + "gate_store.register_gate_section(\"cx\", (\"_all_qubits[0]\", \"_all_qubits[1]\"), cx(q0, q1))\n", + "gate_store.register_gate_section(\"cx\", (\"_all_qubits[1]\", \"_all_qubits[0]\"), cx(q1, q0))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Compile Experiments for desired circuit in the RB circuits list\n", + "# Could also compile all in a for loop over list of circuits, if desired\n", + "exp = exp_from_qasm(program_list[0], qubits=qubit_map, gate_store=gate_store)\n", + "compiled_exp = my_session.compile(exp)\n", + "\n", + "plot_simulation(compiled_exp, length=100e-6)\n", + "\n", + "my_results = my_session.run(compiled_exp)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Draw the circuit from above" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can also draw the circuit corresponding to the simulated signals you just produced!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "circuit_to_draw_0 = qasm3.loads(program_list[0])\n", + "circuit_to_draw_0.draw()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Compile and draw more circuits in the list" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can do this for any circuit you've generated in the list." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Compile Experiments\n", + "exp_1 = exp_from_qasm(program_list[1], qubits=qubit_map, gate_store=gate_store)\n", + "compiled_exp_1 = my_session.compile(exp_1)\n", + "\n", + "plot_simulation(compiled_exp_1, length=100e-6)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "circuit_to_draw_1 = qasm3.loads(program_list[1])\n", + "circuit_to_draw_1.draw()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "ZI_LabOneQ_2p7_Public", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/laboneq/VERSION.txt b/laboneq/VERSION.txt index f161b5d..ed0edc8 100644 --- a/laboneq/VERSION.txt +++ b/laboneq/VERSION.txt @@ -1 +1 @@ -2.10.0 \ No newline at end of file +2.11.0 \ No newline at end of file diff --git a/laboneq/compiler/code_generator/analyze_playback.py b/laboneq/compiler/code_generator/analyze_playback.py index 15a921d..fb2fdbb 100644 --- 
a/laboneq/compiler/code_generator/analyze_playback.py +++ b/laboneq/compiler/code_generator/analyze_playback.py @@ -102,7 +102,8 @@ def _analyze_branches(events, delay, sampling_rate, playwave_max_hint): for ev in events: if ev["event_type"] == "SECTION_START": handle = ev.get("handle", None) - if handle is not None: + user_register = ev.get("user_register", None) + if handle is not None or user_register is not None: begin = length_to_samples(ev["time"] + delay, sampling_rate) # Add the command table interval boundaries as cut points cut_points.add(begin) @@ -112,7 +113,7 @@ def _analyze_branches(events, delay, sampling_rate, playwave_max_hint): # right time; todo(JL): Use actual min_play_wave begin=begin, end=None, - data=(handle, ev["local"]), + data=(handle, ev["local"], user_register), ) ) else: @@ -595,6 +596,7 @@ def analyze_play_wave_times( params={ "handle": interval.data[0], "local": interval.data[1], + "user_register": interval.data[2], "signal_id": signal_id, "section_name": section_name, }, diff --git a/laboneq/compiler/code_generator/sampled_event_handler.py b/laboneq/compiler/code_generator/sampled_event_handler.py index 0115740..e07749a 100644 --- a/laboneq/compiler/code_generator/sampled_event_handler.py +++ b/laboneq/compiler/code_generator/sampled_event_handler.py @@ -113,13 +113,13 @@ def __init__( self.use_command_table = use_command_table self.emit_timing_comments = emit_timing_comments - self.sampled_event_list: List[AWGEvent] = None - self.declared_variables = set() + self.sampled_event_list: List[AWGEvent] = None # type: ignore self.loop_stack: List[AWGEvent] = [] - self.last_event: AWGEvent = None - self.match_parent_event: AWGEvent = None + self.last_event: Optional[AWGEvent] = None + self.match_parent_event: Optional[AWGEvent] = None self.command_table_match_offset = None - self.match_command_table_entries = {} + self.match_command_table_entries: dict[int, tuple] = {} # For feedback match + self.match_seqc_generators: dict[int, 
SeqCGenerator] = {} # user_register match self.current_sequencer_step = 0 if use_current_sequencer_step else None self.sequencer_step = 8 # todo(JL): Is this always the case, and how to get it? @@ -142,15 +142,14 @@ def handle_playwave( state = signature.state match_statement_active = self.match_parent_event is not None + assert (state is not None) == match_statement_active handle = ( self.match_parent_event.params["handle"] if self.match_parent_event is not None else None ) - assert (state is not None) == match_statement_active - - if not self.use_command_table and state is not None: + if not self.use_command_table and state is not None and handle is not None: raise LabOneQException( f"Found match/case statement for handle {handle} on unsupported device." ) @@ -175,13 +174,28 @@ def handle_playwave( if len(self.channels) > 0: play_wave_channel = self.channels[0] % 2 + sig_string = signature.waveform.signature_string() + wave_index = self.get_wave_index(signature, sig_string, play_wave_channel) + if not match_statement_active: + self.handle_regular_playwave( + sampled_event, signature, sig_string, wave_index, play_wave_channel + ) + else: + if handle is not None: + self.handle_playwave_on_feedback(sampled_event, signature, wave_index) + else: + self.handle_playwave_on_user_register( + signature, sig_string, wave_index, play_wave_channel + ) + return True + + def get_wave_index(self, signature, sig_string, play_wave_channel): signal_type_for_wave_index = ( self.awg.signal_type.value if self.device_type.supports_binary_waves else "csv" # Include CSV waves into the index to keep track of waves-AWG mapping ) - sig_string = signature.waveform.signature_string() if ( not signature.waveform.samples and all(p.pulse is None for p in signature.waveform.pulses) @@ -204,50 +218,98 @@ def handle_playwave( play_wave_channel, ) - if not match_statement_active: - if self.use_command_table: - ct_index = self.command_table_tracker.lookup_index_by_signature( - signature + return 
wave_index + + def handle_regular_playwave( + self, + sampled_event: AWGEvent, + signature: PlaybackSignature, + sig_string: str, + wave_index: Optional[int], + play_wave_channel: Optional[int], + ): + assert signature.waveform is not None + if self.use_command_table: + ct_index = self.command_table_tracker.lookup_index_by_signature(signature) + if ct_index is None: + ct_index = self.command_table_tracker.create_entry( + signature, wave_index ) - if ct_index is None: - ct_index = self.command_table_tracker.create_entry( - signature, wave_index - ) - comment = sig_string - if signature.hw_oscillator is not None: - comment += f", osc={signature.hw_oscillator}" - self.seqc_tracker.add_command_table_execution(ct_index, comment=comment) - else: - self.seqc_tracker.add_play_wave_statement( - self.device_type, - self.awg.signal_type.value, - sig_string, - play_wave_channel, + comment = sig_string + if signature.hw_oscillator is not None: + comment += f", osc={signature.hw_oscillator}" + self.seqc_tracker.add_command_table_execution(ct_index, comment=comment) + else: + self.seqc_tracker.add_play_wave_statement( + self.device_type, + self.awg.signal_type.value, + sig_string, + play_wave_channel, + ) + self.seqc_tracker.flush_deferred_function_calls() + self.seqc_tracker.current_time = sampled_event.end + + def handle_playwave_on_feedback( + self, + sampled_event: AWGEvent, + signature: PlaybackSignature, + wave_index: Optional[int], + ): + assert self.use_command_table + assert self.match_parent_event is not None + state = signature.state + signal_id = sampled_event.params["signal_id"] + + if state in self.match_command_table_entries: + if self.match_command_table_entries[state] != ( + signature, + wave_index, + sampled_event.start - self.match_parent_event.start, + ): + raise LabOneQException( + f"Duplicate state {state} with different pulses for handle " + f"{self.match_parent_event.params['handle']} found." 
) - self.seqc_tracker.flush_deferred_function_calls() - self.seqc_tracker.current_time = sampled_event.end else: - assert self.use_command_table - if state in self.match_command_table_entries: - if self.match_command_table_entries[state] != ( - signature, - wave_index, - sampled_event.start - self.match_parent_event.start, - ): - raise LabOneQException( - f"Duplicate state {state} with different pulses for handle " - f"{self.match_parent_event.params['handle']} found." - ) - else: - self.match_command_table_entries[state] = ( - signature, - wave_index, - sampled_event.start - self.match_parent_event.start, + self.match_command_table_entries[state] = ( + signature, + wave_index, + sampled_event.start - self.match_parent_event.start, + ) + self.feedback_connections.setdefault( + self.match_parent_event.params["handle"], FeedbackConnection(None) + ).drive.add(signal_id) + + def handle_playwave_on_user_register( + self, + signature: PlaybackSignature, + sig_string: str, + wave_index: Optional[int], + play_wave_channel: Optional[int], + ): + assert self.match_parent_event is not None + user_register = self.match_parent_event.params["user_register"] + state = signature.state + assert state is not None + assert user_register is not None + branch_generator = self.match_seqc_generators.setdefault(state, SeqCGenerator()) + if self.use_command_table: + ct_index = self.command_table_tracker.lookup_index_by_signature(signature) + if ct_index is None: + ct_index = self.command_table_tracker.create_entry( + signature, wave_index ) - self.feedback_connections.setdefault( - self.match_parent_event.params["handle"], FeedbackConnection(None) - ).drive.add(signal_id) - return True + comment = sig_string + if signature.hw_oscillator is not None: + comment += f", osc={signature.hw_oscillator}" + branch_generator.add_command_table_execution(ct_index, comment=comment) + else: + branch_generator.add_play_wave_statement( + self.device_type, + self.awg.signal_type.value, + sig_string, + 
play_wave_channel, + ) def handle_playhold( self, @@ -602,67 +664,117 @@ def handle_iterate(self, sampled_event: AWGEvent): def handle_match(self, sampled_event: AWGEvent): if self.match_parent_event is not None: + mpe_par = self.match_parent_event.params + se_par = sampled_event.params raise LabOneQException( f"Simultaneous match events on the same physical AWG are not supported. " - f"Affected handles: '{self.match_parent_event.params['handle']}' and " - f"'{sampled_event.params['handle']}'" + "Affected handles/user registers: '" + f"{mpe_par['handle'] or mpe_par['user_register']}' and '" + f"{se_par['handle'] or se_par['user_register']}'" ) self.match_parent_event = sampled_event + self.match_seqc_generators = {} def close_event_list(self): if self.match_parent_event is not None: - handle = self.match_parent_event.params["handle"] - sorted_ct_entries = sorted(self.match_command_table_entries.items()) - first = sorted_ct_entries[0][0] - last = sorted_ct_entries[-1][0] - if first != 0 or last - first + 1 != len(sorted_ct_entries): - raise LabOneQException( - f"States missing in match statement with handle {handle}. First " - f"state: {first}, last state: {last}, number of states: " - f"{len(sorted_ct_entries)}, expected {last+1}, starting from 0." - ) + if self.match_parent_event.params["handle"] is not None: + self.close_event_list_for_handle() + elif self.match_parent_event.params["user_register"] is not None: + self.close_event_list_for_user_register() + + def close_event_list_for_handle(self): + assert self.match_parent_event is not None + handle = self.match_parent_event.params["handle"] + sorted_ct_entries = sorted(self.match_command_table_entries.items()) + first = sorted_ct_entries[0][0] + last = sorted_ct_entries[-1][0] + if first != 0 or last - first + 1 != len(sorted_ct_entries): + raise LabOneQException( + f"States missing in match statement with handle {handle}. 
First " + f"state: {first}, last state: {last}, number of states: " + f"{len(sorted_ct_entries)}, expected {last+1}, starting from 0." + ) - # Check whether we already have the same states in the command table: - if self.command_table_match_offset is not None: - for idx, (signature, wave_index, _) in sorted_ct_entries: - current_ct_entry = self.command_table_tracker[ - idx + self.command_table_match_offset - ] - current_wf_idx = current_ct_entry[1]["waveform"].get("index") - if current_ct_entry[0] != signature or wave_index != current_wf_idx: - raise LabOneQException( - "Multiple command table entry sets for feedback " - f"(handle {handle}), do you use the same pulses and states?" - ) - else: - self.command_table_match_offset = len(self.command_table_tracker) - for idx, (signature, wave_index, _) in sorted_ct_entries: - id2 = self.command_table_tracker.create_entry(signature, wave_index) - assert self.command_table_match_offset + idx == id2 - - ev = self.match_parent_event - start = ev.start - assert start >= self.seqc_tracker.current_time - assert start % self.sequencer_step == 0 - self.seqc_tracker.add_required_playzeros(ev) - # Subtract the 3 cycles that we added (see match_schedule.py for details) - latency = ( - start // self.sequencer_step - - self.current_sequencer_step - - EXECUTETABLEENTRY_LATENCY + # Check whether we already have the same states in the command table: + if self.command_table_match_offset is not None: + for idx, (signature, wave_index, _) in sorted_ct_entries: + current_ct_entry = self.command_table_tracker[ + idx + self.command_table_match_offset + ] + assert current_ct_entry is not None + current_wf_idx = current_ct_entry[1]["waveform"].get("index") + if current_ct_entry[0] != signature or wave_index != current_wf_idx: + raise LabOneQException( + "Multiple command table entry sets for feedback " + f"(handle {handle}), do you use the same pulses and states?" 
+ ) + else: + self.command_table_match_offset = len(self.command_table_tracker) + for idx, (signature, wave_index, _) in sorted_ct_entries: + id2 = self.command_table_tracker.create_entry(signature, wave_index) + assert self.command_table_match_offset + idx == id2 + + ev = self.match_parent_event + start = ev.start + assert start >= self.seqc_tracker.current_time + assert start % self.sequencer_step == 0 + self.seqc_tracker.add_required_playzeros(ev) + # Subtract the 3 cycles that we added (see match_schedule.py for details) + assert self.current_sequencer_step is not None + latency = ( + start // self.sequencer_step + - self.current_sequencer_step + - EXECUTETABLEENTRY_LATENCY + ) + self.seqc_tracker.add_command_table_execution( + "QA_DATA_PROCESSED" if ev.params["local"] else "ZSYNC_DATA_PQSC_REGISTER", + latency="current_seq_step " + + (f"+ {latency}" if latency >= 0 else f"- {-latency}"), + comment="Match handle " + handle, + ) + self.seqc_tracker.add_timing_comment(ev.end) + self.seqc_tracker.flush_deferred_function_calls() + self.seqc_tracker.current_time = self.match_parent_event.end + self.match_parent_event = None + + def close_event_list_for_user_register(self): + match_event = self.match_parent_event + assert match_event is not None + user_register = match_event.params["user_register"] + if not 0 <= user_register <= 15: + raise LabOneQException( + f"Invalid user register {user_register} in match statement. User registers must be between 0 and 15." 
) - self.seqc_tracker.add_command_table_execution( - "QA_DATA_PROCESSED" - if ev.params["local"] - else "ZSYNC_DATA_PQSC_REGISTER", - latency="current_seq_step " - + (f"+ {latency}" if latency >= 0 else f"- {-latency}"), - comment="Match handle " + handle, + var_name = f"_match_user_register_{user_register}" + try: + self.declarations_generator.add_variable_declaration( + var_name, f"getUserReg({user_register})" ) - self.seqc_tracker.add_timing_comment(ev.end) - self.seqc_tracker.flush_deferred_function_calls() - self.seqc_tracker.current_time = self.match_parent_event.end - self.match_parent_event = None + except LabOneQException: + pass # Already declared, this is fine + self.seqc_tracker.add_required_playzeros(match_event) + if_generator = SeqCGenerator() + conditions_bodies: list[tuple[Optional[str], SeqCGenerator]] = [ + (f"{var_name} == {state}", gen.compressed()) + for state, gen in self.match_seqc_generators.items() + if gen.num_noncomment_statements() > 0 + ] + # If there is no match, we just play zeros to keep the timing correct + play_zero_body = SeqCGenerator() + play_zero_body.add_play_zero_statement( + match_event.end - self.seqc_tracker.current_time, + self.device_type, + ) + conditions_bodies.append((None, play_zero_body.compressed())) + if_generator.add_if(*zip(*conditions_bodies)) # type: ignore + self.seqc_tracker.append_loop_stack_generator( + always=True, generator=if_generator + ) + self.seqc_tracker.append_loop_stack_generator(always=True) + self.seqc_tracker.add_timing_comment(match_event.end) + self.seqc_tracker.flush_deferred_function_calls() + self.seqc_tracker.current_time = match_event.end + self.match_parent_event = None def handle_sampled_event(self, sampled_event: AWGEvent): signature = sampled_event.type diff --git a/laboneq/compiler/code_generator/seq_c_generator.py b/laboneq/compiler/code_generator/seq_c_generator.py index 20b00fe..cb977e5 100644 --- a/laboneq/compiler/code_generator/seq_c_generator.py +++ 
b/laboneq/compiler/code_generator/seq_c_generator.py @@ -9,7 +9,7 @@ import re import textwrap from enum import Enum -from typing import Any, Dict, List, Optional, Set +from typing import Any, Dict, List, Optional, Sequence, Set from laboneq.compiler.code_generator.compressor import Run, compressor_core from laboneq.compiler.common.device_type import DeviceType @@ -145,6 +145,22 @@ def add_do_while(self, condition, body: SeqCGenerator): } ) + def add_if( + self, conditions: Sequence[Optional[str]], bodies: Sequence[SeqCGenerator] + ): + assert len(conditions) == len(bodies) + assert all(b is not None for b in bodies) + assert all(c for c in conditions[:-1]) + complexity = sum([b.estimate_complexity() + 1 for b in bodies]) + self.add_statement( + { + "type": "if", + "conditions": conditions, + "bodies": bodies, + "complexity": complexity, + } + ) + def add_function_def(self, text): self.add_statement({"type": "function_def", "text": text}) @@ -378,6 +394,25 @@ def emit_statement(self, statement: SeqCStatement): body = textwrap.indent(statement["body"].generate_seq_c(), " ") return f"repeat ({statement['num_repeats']}) {{\n{body}}}\n" + elif statement["type"] == "if": + n = len(statement["conditions"]) + bodies = [ + textwrap.indent(b.generate_seq_c(), " ") for b in statement["bodies"] + ] + assert len(statement["bodies"]) == n + text = "" + if n > 0: + text += f"if ({statement['conditions'][0]}) {{\n{bodies[0]}}}\n" + for condition, body in zip(statement["conditions"][1:-1], bodies[1:-1]): + text += f"else if ({condition}) {{\n{body}}}\n" + if n > 1: + condition = statement["conditions"][-1] + if condition is None: + text += f"else {{\n{bodies[-1]}}}\n" + else: + text += f"else if ({condition}) {{\n{bodies[-1]}}}\n" + return text + elif statement["type"] == "assignWaveIndex": wave_channels = self._build_wave_channel_assignment(statement) return f'assignWaveIndex({wave_channels},{statement["wave_index"]});\n' diff --git 
a/laboneq/compiler/code_generator/seqc_tracker.py b/laboneq/compiler/code_generator/seqc_tracker.py index bd8ce9b..c80ff7c 100644 --- a/laboneq/compiler/code_generator/seqc_tracker.py +++ b/laboneq/compiler/code_generator/seqc_tracker.py @@ -148,7 +148,7 @@ def append_loop_stack_generator(self, always=False, generator=None): top_of_stack = self.loop_stack_generators[-1] if always or len(top_of_stack) == 0 or top_of_stack[-1].num_statements() > 0: - self.loop_stack_generators[-1].append(generator) + top_of_stack.append(generator) def push_loop_stack_generator(self, generator=None): self.loop_stack_generators.append([]) diff --git a/laboneq/compiler/experiment_access/device_info.py b/laboneq/compiler/experiment_access/device_info.py deleted file mode 100644 index 7dd3c64..0000000 --- a/laboneq/compiler/experiment_access/device_info.py +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2022 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - -from __future__ import annotations - -from dataclasses import dataclass -from typing import Optional - - -@dataclass -class DeviceInfo: - id: str - device_type: str - serial: str - server: str - interface: str - reference_clock_source: str - is_qc: Optional[bool] diff --git a/laboneq/compiler/experiment_access/dsl_loader.py b/laboneq/compiler/experiment_access/dsl_loader.py index d3d1a09..4503038 100644 --- a/laboneq/compiler/experiment_access/dsl_loader.py +++ b/laboneq/compiler/experiment_access/dsl_loader.py @@ -8,7 +8,6 @@ import logging import typing from dataclasses import dataclass -from numbers import Number from types import SimpleNamespace from typing import Any, Callable, Dict, Tuple @@ -37,22 +36,30 @@ _logger = logging.getLogger(__name__) -def find_value_or_parameter_attr(entity: Any, attr: str, value_types: Tuple[type, ...]): +def find_value_or_parameter_attr( + entity: Any, attr: str, value_types: tuple[type, ...] 
+) -> tuple[Any, str]: param = None value = getattr(entity, attr, None) if value is not None and not isinstance(value, value_types): param = getattr(value, "uid", None) value = None + return value, param class DSLLoader(LoaderBase): + def __init__(self): + super().__init__() + self._nt_only_params = [] + self._section_operations_to_add = [] + def load(self, experiment: Experiment, device_setup: DeviceSetup): - global_leader_device_id = None + self.global_leader_device_id = None for server in device_setup.servers.values(): if hasattr(server, "leader_uid"): - global_leader_device_id = server.leader_uid + self.global_leader_device_id = server.leader_uid self.add_server(server.uid, server.host, server.port, server.api_level) dest_path_devices = {} @@ -66,25 +73,14 @@ def load(self, experiment: Experiment, device_setup: DeviceSetup): reference_clock = device.reference_clock for device in sorted(device_setup.instruments, key=lambda x: x.uid): - server = device.server_uid - driver = type(device).__name__.lower() - serial = device.address - interface = device.interface - is_global_leader = 0 - if global_leader_device_id == device.uid: - is_global_leader = 1 reference_clock_source = getattr(device, "reference_clock_source", None) is_qc = getattr(device, "is_qc", None) self.add_device( device.uid, driver, - serial, - server, - interface, - is_global_leader, - reference_clock, + reference_clock=reference_clock, reference_clock_source=None if reference_clock_source is None else reference_clock_source.value, @@ -235,8 +231,10 @@ def load(self, experiment: Experiment, device_setup: DeviceSetup): if calibration is not None: - def opt_param(val: float | Parameter | None) -> float | str | None: - if val is None or isinstance(val, Number): + def opt_param( + val: float | Parameter | None, + ) -> float | int | str | None: + if val is None or isinstance(val, (float, int)): return val self._nt_only_params.append(val.uid) return val.uid @@ -254,26 +252,17 @@ def opt_param(val: float | 
Parameter | None) -> float | str | None: oscillator_uid = oscillator.uid - frequency_param = None - frequency = oscillator.frequency - try: - frequency = float(frequency) - except (ValueError, TypeError): - if frequency is not None and hasattr(frequency, "uid"): - frequency_param = frequency.uid - frequency = None - else: - raise + if hasattr(frequency, "uid"): + frequency = self._get_or_create_parameter(frequency.uid) + modulated_paths[ls.path] = { "oscillator_id": oscillator_uid, "is_hardware": is_hardware, } known_oscillator = self._oscillators.get(oscillator_uid) if known_oscillator is None: - self.add_oscillator( - oscillator_uid, frequency, frequency_param, is_hardware - ) + self.add_oscillator(oscillator_uid, frequency, is_hardware) if is_hardware: device_id = dest_path_devices[ls.path]["device"] @@ -281,9 +270,8 @@ def opt_param(val: float | Parameter | None) -> float | str | None: else: if ( known_oscillator["frequency"], - known_oscillator["frequency_param"], - known_oscillator["hardware"], - ) != (frequency, frequency_param, is_hardware): + known_oscillator["is_hardware"], + ) != (frequency, is_hardware): raise Exception( f"Duplicate oscillator uid {oscillator_uid} found in {ls.path}" ) @@ -462,7 +450,8 @@ def opt_param(val: float | Parameter | None) -> float | str | None: if len(channels) > 1: if len(set(channels)) < len(channels): raise RuntimeError( - f"Channels for a signal must be distinct, but got {channels} for signal {signal}, connection ports: {local_ports}" + f"Channels for a signal must be distinct, but got {channels}" + f" for signal {signal}, connection ports: {local_ports}" ) self.add_signal_connection( @@ -550,6 +539,14 @@ def exchanger_map(section): section, None, section_uid_map, acquisition_type_map, exchanger_map ) + # Need to defer the insertion of section operations. In sequential averaging mode, + # the tree-walking order might otherwise make us visit operations which depend on parameters + # we haven't seen the sweep of yet. 
+ for section, acquisition_type, instance_id in self._section_operations_to_add: + self._insert_section_operations( + section, acquisition_type, exchanger_map, instance_id + ) + if seq_avg_section is not None and len(sweep_sections): avg_children = self._section_tree.get(sweep_sections[0].uid, []) sweep_children = self._section_tree.get(seq_avg_section.uid, []) @@ -636,6 +633,33 @@ def _extract_markers(self, operation): for k, v in markers_raw.items() ] + def _sweep_derived_param(self, param: Parameter): + base_swept_params = { + p.uid: s for s, pp in self._section_parameters.items() for p in pp + } + if param.uid in base_swept_params: + return + + # This parameter is not swept directly, but derived from a swept parameter; + # we must add it to the corresponding loop. + parent = param.driven_by[0] + self._sweep_derived_param(parent) + # the parent should now be added correctly, so try the initial test again + base_swept_params = { + p.uid: s for s, pp in self._section_parameters.items() for p in pp + } + assert parent.uid in base_swept_params + values_list = None + if param.values is not None: + values_list = list(param.values) + axis_name = param.axis_name + self.add_section_parameter( + base_swept_params[parent.uid], + param.uid, + values_list=values_list, + axis_name=axis_name, + ) + def _insert_section( self, section, @@ -646,8 +670,6 @@ def _insert_section( has_repeat = False count = 1 - _auto_pulse_id = (f"{section.uid}__auto_pulse_{i}" for i in itertools.count()) - if hasattr(section, "count"): has_repeat = True count = section.count @@ -671,7 +693,8 @@ def _insert_section( count = len(parameter.values) if count < 1: raise Exception( - f"Repeat count must be at least 1, but section {section.uid} has count={count}" + f"Repeat count must be at least 1, but section {section.uid}" + f" has count={count}" ) if ( section.execution_type is not None @@ -679,8 +702,8 @@ def _insert_section( and parameter.uid in self._nt_only_params ): raise Exception( - f"Parameter 
{parameter.uid} can't be swept in real-time, it is bound to a value " - f"that can only be set in near-time" + f"Parameter {parameter.uid} can't be swept in real-time, it is" + f" bound to a value that can only be set in near-time" ) execution_type = None @@ -719,6 +742,10 @@ def _insert_section( if hasattr(section, "handle"): handle = section.handle + user_register = None + if hasattr(section, "user_register"): + user_register = section.user_register + state = None if hasattr(section, "state"): state = section.state @@ -739,6 +766,13 @@ def _insert_section( # an acquire event - add acquisition_types acquisition_types = [acquisition_type.value] + play_after = getattr(section, "play_after", None) + if play_after: + section_uid = lambda x: x.uid if hasattr(x, "uid") else x + play_after = section_uid(play_after) + if isinstance(play_after, list): + play_after = [section_uid(s) for s in play_after] + self.add_section( instance_id, SectionInfo( @@ -755,10 +789,11 @@ def _insert_section( averaging_mode=averaging_mode, repetition_mode=repetition_mode, repetition_time=repetition_time, - play_after=getattr(section, "play_after", None), + play_after=play_after, reset_oscillator_phase=reset_oscillator_phase, trigger_output=trigger, handle=handle, + user_register=user_register, state=state, local=local, ), @@ -775,6 +810,17 @@ def _insert_section( continue self.add_section_signal(instance_id, operation.signal) + self._section_operations_to_add.append((section, acquisition_type, instance_id)) + + def _insert_section_operations( + self, + section, + acquisition_type, + exchanger_map: Callable[[Any], Any], + instance_id: str, + ): + + _auto_pulse_id = (f"{section.uid}__auto_pulse_{i}" for i in itertools.count()) for operation in exchanger_map(section).operations: if hasattr(operation, "signal"): pulse_offset = None @@ -791,6 +837,9 @@ def _insert_section( pulse_offset = None pulse_offset_param = operation.time.uid + if pulse_offset_param is not None: + 
self._sweep_derived_param(operation.time) + ssp = SectionSignalPulse( signal_id=operation.signal, offset=pulse_offset, @@ -812,6 +861,7 @@ def _insert_section( and not isinstance(operation_length, int) ): operation_length_param = operation_length.uid + self._sweep_derived_param(operation_length) operation_length = None if hasattr(operation, "pulse"): @@ -838,7 +888,8 @@ def _insert_section( if hasattr(operation, "handle") and pulse is None: raise RuntimeError( - f"Either 'kernel' or 'length' must be provided for the acquire operation with handle '{getattr(operation, 'handle')}'." + f"Either 'kernel' or 'length' must be provided for the" + f" acquire operation with handle '{getattr(operation, 'handle')}'." ) if pulse is not None: @@ -860,6 +911,8 @@ def _insert_section( amplitude, amplitude_param = find_value_or_parameter_attr( pulse, "amplitude", (float, int, complex) ) + if amplitude_param is not None: + self._sweep_derived_param(pulse.amplitude) can_compress = False if hasattr(pulse, "can_compress"): @@ -884,21 +937,31 @@ def _insert_section( ) = find_value_or_parameter_attr( operation, "amplitude", (int, float, complex) ) + if pulse_amplitude_param is not None: + self._sweep_derived_param(operation.amplitude) pulse_phase, pulse_phase_param = find_value_or_parameter_attr( operation, "phase", (int, float) ) + if pulse_phase_param is not None: + self._sweep_derived_param(operation.phase) ( pulse_increment_oscillator_phase, pulse_increment_oscillator_phase_param, ) = find_value_or_parameter_attr( operation, "increment_oscillator_phase", (int, float) ) + if pulse_increment_oscillator_phase_param is not None: + self._sweep_derived_param( + operation.increment_oscillator_phase + ) ( pulse_set_oscillator_phase, pulse_set_oscillator_phase_param, ) = find_value_or_parameter_attr( operation, "set_oscillator_phase", (int, float) ) + if pulse_set_oscillator_phase_param is not None: + self._sweep_derived_param(operation.set_oscillator_phase) acquire_params = None if 
hasattr(operation, "handle"): @@ -917,6 +980,7 @@ def _insert_section( if hasattr(val, "uid"): # Take the presence of "uid" as a proxy for isinstance(val, SweepParameter) pulse_parameters[param] = ParamRef(val.uid) + self._sweep_derived_param(val) if operation_pulse_parameters is not None: for param, val in operation_pulse_parameters.items(): if hasattr(val, "uid"): @@ -924,6 +988,7 @@ def _insert_section( operation_pulse_parameters[param] = ParamRef( val.uid ) + self._sweep_derived_param(val) if markers: for m in markers: @@ -972,12 +1037,18 @@ def _insert_section( ) = find_value_or_parameter_attr( operation, "increment_oscillator_phase", (int, float) ) + if pulse_increment_oscillator_phase_param is not None: + self._sweep_derived_param( + operation.increment_oscillator_phase + ) ( pulse_set_oscillator_phase, pulse_set_oscillator_phase_param, ) = find_value_or_parameter_attr( operation, "set_oscillator_phase", (int, float) ) + if pulse_set_oscillator_phase_param is not None: + self._sweep_derived_param(operation.set_oscillator_phase) for par in [ "precompensation_clear", "amplitude", @@ -1048,5 +1119,6 @@ def __getattr__(self, attr): if self._base is not None and hasattr(self._base, attr): return getattr(self._base, attr) raise AttributeError( - f"Field {attr} not found on overrider {self._overrider} (type {type(self._overrider)}) nor on base {self._base}" + f"Field {attr} not found on overrider {self._overrider}" + f" (type {type(self._overrider)}) nor on base {self._base}" ) diff --git a/laboneq/compiler/experiment_access/experiment_dao.py b/laboneq/compiler/experiment_access/experiment_dao.py index f4c9f0c..6a6c50f 100644 --- a/laboneq/compiler/experiment_access/experiment_dao.py +++ b/laboneq/compiler/experiment_access/experiment_dao.py @@ -12,15 +12,14 @@ from laboneq._utils import cached_method from laboneq.compiler.experiment_access import json_dumper -from laboneq.compiler.experiment_access.device_info import DeviceInfo from 
laboneq.compiler.experiment_access.dsl_loader import DSLLoader from laboneq.compiler.experiment_access.json_loader import JsonLoader -from laboneq.compiler.experiment_access.oscillator_info import OscillatorInfo from laboneq.compiler.experiment_access.section_info import SectionInfo from laboneq.compiler.experiment_access.signal_info import SignalInfo from laboneq.core.exceptions import LabOneQException from laboneq.core.types.enums import AcquisitionType from laboneq.core.validators import dicts_equal +from laboneq.data.compilation_job import DeviceInfo, OscillatorInfo, ParameterInfo _logger = logging.getLogger(__name__) @@ -108,24 +107,17 @@ def signals(self) -> list[str]: return sorted([s["signal_id"] for s in self._data["signals"].values()]) def devices(self) -> List[str]: - return [d["id"] for d in self._data["devices"].values()] + return [d["uid"] for d in self._data["devices"].values()] def global_leader_device(self) -> str: - try: - return next( - d for d in self._data["devices"].values() if d.get("is_global_leader") - )["id"] - except StopIteration: - return None + return self._data["global_leader_device_id"] @classmethod def _device_info_keys(cls): return [ - "id", + "uid", "device_type", - "serial", - "server", - "interface", + "reference_clock", "reference_clock_source", "is_qc", ] @@ -159,7 +151,7 @@ def _device_types_in_section_no_descend(self, section_id): return { d["device_type"] for d in self._data["devices"].values() - if d["id"] in devices + if d["uid"] in devices } def device_types_in_section(self, section_id): @@ -178,7 +170,6 @@ def _signal_info_keys(cls): "signal_id", "signal_type", "device_id", - "device_serial", "device_type", "connection_type", "channels", @@ -200,7 +191,6 @@ def signal_info(self, signal_id) -> SignalInfo: device_info = self._data["devices"][signal_connection["device_id"]] signal_info_copy["device_type"] = device_info["device_type"] - signal_info_copy["device_serial"] = device_info["serial"] return SignalInfo( **{k: 
signal_info_copy[k] for k in self._signal_info_keys()} ) @@ -323,9 +313,9 @@ def pulse(self, pulse_id): @classmethod def _oscillator_info_fields(cls): - return ["id", "frequency", "frequency_param", "hardware"] + return ["uid", "frequency", "is_hardware"] - def oscillator_info(self, oscillator_id) -> OscillatorInfo: + def oscillator_info(self, oscillator_id) -> OscillatorInfo | None: oscillator = self._data["oscillators"].get(oscillator_id) if oscillator is None: return None @@ -339,11 +329,11 @@ def hardware_oscillators(self) -> List[OscillatorInfo]: device_oscillators = self.device_oscillators(device) for oscillator_id in device_oscillators: info = self.oscillator_info(oscillator_id) - if info is not None and info.hardware: + if info is not None and info.is_hardware: info.device_id = device oscillator_infos.append(info) - return list(sorted(oscillator_infos, key=lambda x: (x.device_id, x.id))) + return sorted(oscillator_infos, key=lambda x: x.uid) def device_oscillators(self, device_id): return [ @@ -419,17 +409,14 @@ def markers_on_signal(self, signal_id: str): def triggers_on_signal(self, signal_id: str): return self._data["signal_trigger"].get(signal_id) - def section_parameters(self, section_id): - return [ - {k: p.get(k) for k in ["id", "start", "step", "values", "axis_name"]} - for p in self._data["section_parameters"].get(section_id, []) - ] + def section_parameters(self, section_id) -> list[ParameterInfo]: + return self._data["section_parameters"].get(section_id, []) def validate_experiment(self): all_parameters = set() for section_id in self.sections(): for parameter in self.section_parameters(section_id): - all_parameters.add(parameter["id"]) + all_parameters.add(parameter.uid) for section_id in self.sections(): for signal_id in self.section_signals(section_id): diff --git a/laboneq/compiler/experiment_access/json_dumper.py b/laboneq/compiler/experiment_access/json_dumper.py index 95ef5cb..3836c8f 100644 --- 
a/laboneq/compiler/experiment_access/json_dumper.py +++ b/laboneq/compiler/experiment_access/json_dumper.py @@ -5,6 +5,8 @@ import typing +from laboneq.data.compilation_job import ParameterInfo + if typing.TYPE_CHECKING: from laboneq.compiler.experiment_access import ExperimentDAO @@ -35,9 +37,10 @@ def dump(experiment_dao: ExperimentDAO): device_info = experiment_dao.device_info(device) device_entry = {} - for key in ["id", "serial", "interface", "reference_clock_source", "is_qc"]: + for key in ["reference_clock_source", "is_qc"]: if getattr(device_info, key) is not None: device_entry[key] = getattr(device_info, key) + device_entry["id"] = device_info.uid device_entry["driver"] = device_info.device_type.lower() oscillator_ids = experiment_dao.device_oscillators(device) @@ -46,8 +49,6 @@ def dump(experiment_dao: ExperimentDAO): device_entry["oscillators_list"] = [ {"$ref": oscillator_id} for oscillator_id in oscillator_ids ] - if device_info.server is not None: - device_entry["server"] = {"$ref": device_info.server} device_entries[device_entry["id"]] = device_entry reference_clock = experiment_dao.device_reference_clock(device) @@ -94,12 +95,12 @@ def dump(experiment_dao: ExperimentDAO): out_oscillators = [] for oscillator_info in oscillator_infos: frequency = oscillator_info.frequency - if oscillator_info.frequency_param is not None: - frequency = {"$ref": oscillator_info.frequency_param} + if isinstance(oscillator_info.frequency, ParameterInfo): + frequency = {"$ref": oscillator_info.frequency.uid} out_oscillator_entry = { - "id": oscillator_info.id, + "id": oscillator_info.uid, "frequency": frequency, - "hardware": oscillator_info.hardware, + "hardware": oscillator_info.is_hardware, } out_oscillators.append(out_oscillator_entry) if len(out_oscillators) > 0: @@ -122,7 +123,7 @@ def dump(experiment_dao: ExperimentDAO): signal_entry["offset"] = signal_info.offset signal_oscillator = experiment_dao.signal_oscillator(signal_info.signal_id) if signal_oscillator is not 
None: - signal_entry["oscillators_list"] = [{"$ref": signal_oscillator.id}] + signal_entry["oscillators_list"] = [{"$ref": signal_oscillator.uid}] retval["signals"].append(signal_entry) device_id = experiment_dao.device_from_signal(signal_info.signal_id) @@ -242,11 +243,11 @@ def dump(experiment_dao: ExperimentDAO): if len(section_parameters) > 0: out_section["repeat"]["parameters"] = [] for parameter in section_parameters: - param_object = {"id": parameter["id"]} + param_object = {"id": parameter.uid} keys = ["start", "step", "values"] for key in keys: - if parameter.get(key) is not None: - param_object[key] = parameter[key] + if getattr(parameter, key) is not None: + param_object[key] = getattr(parameter, key) out_section["repeat"]["parameters"].append(param_object) @@ -268,6 +269,7 @@ def dump(experiment_dao: ExperimentDAO): "averaging_mode", "play_after", "handle", + "user_register", "state", "local", ] diff --git a/laboneq/compiler/experiment_access/json_loader.py b/laboneq/compiler/experiment_access/json_loader.py index e32f928..d7fe4f2 100644 --- a/laboneq/compiler/experiment_access/json_loader.py +++ b/laboneq/compiler/experiment_access/json_loader.py @@ -1,6 +1,8 @@ # Copyright 2022 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + import copy import json import logging @@ -25,7 +27,7 @@ def find_value_or_parameter_dict( pulse_ref: Dict[str, Any], name: str, types: Tuple[type, ...] 
-): +) -> tuple[float | None, str | None]: param = None value = pulse_ref.get(name) if value is not None and not isinstance(value, types): @@ -59,41 +61,18 @@ def _load_servers(self, experiment): def _load_devices(self, experiment): for device in sorted(experiment["devices"], key=lambda x: x["id"]): - if "server" in device: - server = device["server"]["$ref"] - else: - server = None if "driver" in device: driver = device["driver"] else: driver = device["device_type"] - if "serial" in device: - serial = device["serial"] - else: - serial = None - - if "interface" in device: - interface = device["interface"] - else: - interface = None - - if ( - "reference_clock_source" in device - and device["reference_clock_source"] is not None - ): - reference_clock_source = device["reference_clock_source"] - else: - reference_clock_source = None + reference_clock_source = device.get("reference_clock_source") is_qc = device.get("is_qc") self.add_device( - device["id"], - driver, - serial, - server, - interface, + device_id=device["id"], + device_type=driver, reference_clock_source=reference_clock_source, is_qc=is_qc, ) @@ -103,25 +82,20 @@ def _load_devices(self, experiment): self.add_device_oscillator(device["id"], oscillator_ref["$ref"]) def _load_oscillator(self, experiment): - if "oscillators" in experiment: - for oscillator in experiment["oscillators"]: - frequency = None - frequency_param = None - if "frequency" in oscillator: - frequency = oscillator["frequency"] - if not isinstance(frequency, float) and not isinstance( - frequency, int - ): - if frequency is not None and "$ref" in frequency: - frequency_param = frequency["$ref"] - frequency = None - - self.add_oscillator( - oscillator["id"], - frequency, - frequency_param, - bool(oscillator["hardware"]), - ) + for oscillator in experiment.get("oscillators", []): + if (frequency := oscillator.get("frequency")) is None: + continue + if not isinstance(frequency, (int, float)): + if "$ref" in frequency: + frequency = 
self._get_or_create_parameter(frequency["$ref"]) + else: + frequency = None + + self.add_oscillator( + oscillator["id"], + frequency, + bool(oscillator["hardware"]), + ) def _load_connectivity(self, experiment): if "connectivity" in experiment: @@ -131,7 +105,7 @@ def _load_connectivity(self, experiment): if "leader" in experiment["connectivity"]: leader_device_id = experiment["connectivity"]["leader"]["$ref"] - self._devices[leader_device_id]["is_global_leader"] = True + self.global_leader_device_id = leader_device_id if "reference_clock" in experiment["connectivity"]: reference_clock = experiment["connectivity"]["reference_clock"] @@ -316,6 +290,10 @@ def _load_sections(self, experiment): if "handle" in section: handle = section["handle"] + user_register = None + if "user_register" in section: + user_register = section["user_register"] + state = None if "state" in section: state = section["state"] @@ -369,6 +347,7 @@ def _load_sections(self, experiment): reset_oscillator_phase=reset_oscillator_phase, trigger_output=trigger_output, handle=handle, + user_register=user_register, state=state, local=local, ), diff --git a/laboneq/compiler/experiment_access/loader_base.py b/laboneq/compiler/experiment_access/loader_base.py index f870e5e..3c7af3b 100644 --- a/laboneq/compiler/experiment_access/loader_base.py +++ b/laboneq/compiler/experiment_access/loader_base.py @@ -9,6 +9,7 @@ from laboneq.compiler.experiment_access.section_signal_pulse import SectionSignalPulse from laboneq.core.exceptions import LabOneQException from laboneq.core.types.enums import AcquisitionType +from laboneq.data.compilation_job import ParameterInfo logger = logging.getLogger(__name__) @@ -22,6 +23,7 @@ def __init__(self): # leader_uid, follower_uid self.dios: list[tuple[str, str]] = [] + self.global_leader_device_id: str = None self._devices = {} self._device_oscillators = {} @@ -41,6 +43,8 @@ def __init__(self): self._root_sections = [] self._handle_acquires = {} + self._all_parameters: 
dict[str, ParameterInfo] = {} + def data(self) -> dict[str, Any]: return { "devices": self._devices, @@ -60,6 +64,7 @@ def data(self) -> dict[str, Any]: "signal_oscillator": self._signal_oscillator, "signal_trigger": self._signal_trigger, "handle_acquires": self._handle_acquires, + "global_leader_device_id": self.global_leader_device_id, } def add_device_oscillator(self, device_id, oscillator_id): @@ -70,6 +75,12 @@ def add_device_oscillator(self, device_id, oscillator_id): } ) + def _get_or_create_parameter(self, parameter_id) -> ParameterInfo: + if (parameter := self._all_parameters.get(parameter_id)) is not None: + return parameter + param = self._all_parameters[parameter_id] = ParameterInfo(uid=parameter_id) + return param + def add_section_parameter( self, section_id, @@ -79,16 +90,24 @@ def add_section_parameter( values_list=None, axis_name=None, ): - self._section_parameters.setdefault(section_id, []).append( - { - "section_id": section_id, - "id": parameter_id, - "start": start, - "step": step, - "values": values_list, - "axis_name": axis_name, - } - ) + param = self._get_or_create_parameter(parameter_id) + if ( + param.start is not None + or param.step is not None + or param.values is not None + or param.axis_name is not None + ): + raise LabOneQException( + "Illegal nesting of multiple real-time sweeps over same parameter: {}".format( + parameter_id + ) + ) + param.start = start + param.step = step + param.values = values_list + param.axis_name = axis_name + + self._section_parameters.setdefault(section_id, []).append(param) def add_section_signal(self, section_uid, signal_uid): self._section_signals.setdefault(section_uid, set()).add(signal_uid) @@ -119,32 +138,23 @@ def add_device( self, device_id, device_type, - serial, - server, - interface, - is_global_leader=None, reference_clock=None, reference_clock_source=None, is_qc=None, ): self._devices[device_id] = { - "id": device_id, + "uid": device_id, "device_type": device_type, - "serial": serial, - 
"server": server, - "interface": interface, - "is_global_leader": is_global_leader, "reference_clock": reference_clock, "reference_clock_source": reference_clock_source, "is_qc": is_qc, } - def add_oscillator(self, oscillator_id, frequency, frequency_param, is_hardware): + def add_oscillator(self, oscillator_id, frequency, is_hardware): self._oscillators[oscillator_id] = { - "id": oscillator_id, + "uid": oscillator_id, "frequency": frequency, - "frequency_param": frequency_param, - "hardware": is_hardware, + "is_hardware": is_hardware, } def add_signal(self, signal_id, signal_type, modulation, offset=None): @@ -162,6 +172,10 @@ def add_signal_connection(self, signal_id, signal_connection): self._signal_connections[signal_id] = signal_connection def add_section(self, section_id, section_info: SectionInfo): + if section_info.handle is not None and section_info.user_register is not None: + raise LabOneQException( + f"Section {section_id} has both a handle and a user register set." + ) self._sections[section_id] = section_info def add_pulse(self, pulse_id, pulse_def: PulseDef): diff --git a/laboneq/compiler/experiment_access/oscillator_info.py b/laboneq/compiler/experiment_access/oscillator_info.py deleted file mode 100644 index 649bba1..0000000 --- a/laboneq/compiler/experiment_access/oscillator_info.py +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright 2022 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - -from __future__ import annotations - -from dataclasses import dataclass - - -@dataclass -class OscillatorInfo: - id: str - frequency: float - frequency_param: str - hardware: bool - device_id: str = None diff --git a/laboneq/compiler/experiment_access/pulse_def.py b/laboneq/compiler/experiment_access/pulse_def.py index 7658563..76c98cf 100644 --- a/laboneq/compiler/experiment_access/pulse_def.py +++ b/laboneq/compiler/experiment_access/pulse_def.py @@ -25,15 +25,6 @@ class PulseDef: def effective_amplitude(self) -> float: return 1.0 if self.amplitude is None 
else self.amplitude - @staticmethod - def effective_length(pulse_def: PulseDef, sampling_rate: float) -> float: - if pulse_def is None: - return None - length = pulse_def.length - if length is None and pulse_def.samples is not None: - length = len(pulse_def.samples) / sampling_rate - return length - def __eq__(self, other: PulseDef): if isinstance(other, PulseDef): for k, v in asdict(self).items(): diff --git a/laboneq/compiler/experiment_access/section_info.py b/laboneq/compiler/experiment_access/section_info.py index dd48e3e..326ccdc 100644 --- a/laboneq/compiler/experiment_access/section_info.py +++ b/laboneq/compiler/experiment_access/section_info.py @@ -24,6 +24,7 @@ class SectionInfo: play_after: Optional[Union[str, List[str]]] reset_oscillator_phase: bool handle: Optional[str] + user_register: Optional[int] state: Optional[int] local: Optional[bool] section_display_name: Optional[str] = None diff --git a/laboneq/compiler/experiment_access/signal_info.py b/laboneq/compiler/experiment_access/signal_info.py index d02c199..6595413 100644 --- a/laboneq/compiler/experiment_access/signal_info.py +++ b/laboneq/compiler/experiment_access/signal_info.py @@ -11,7 +11,6 @@ class SignalInfo: signal_id: str signal_type: str device_id: str - device_serial: str device_type: str connection_type: str channels: str diff --git a/laboneq/compiler/qccs-schema_2_5_0.json b/laboneq/compiler/qccs-schema_2_5_0.json index 1f57cbd..636f50b 100644 --- a/laboneq/compiler/qccs-schema_2_5_0.json +++ b/laboneq/compiler/qccs-schema_2_5_0.json @@ -685,7 +685,8 @@ "type": "number" }, { - "type": "null"} + "type": "null" + } ] }, "length": { @@ -694,7 +695,8 @@ "type": "number" }, { - "type": "null"} + "type": "null" + } ] }, "waveform": { @@ -796,6 +798,14 @@ "type": "string", "default": "" }, + "user_register": { + "type": "number", + "default": null + }, + "state": { + "type": "number", + "default": null + }, "trigger_output": { "$ref": "#/definitions/trigger-outputs-def" } diff --git 
a/laboneq/compiler/scheduler/loop_iteration_schedule.py b/laboneq/compiler/scheduler/loop_iteration_schedule.py index 883be31..ea77068 100644 --- a/laboneq/compiler/scheduler/loop_iteration_schedule.py +++ b/laboneq/compiler/scheduler/loop_iteration_schedule.py @@ -10,6 +10,7 @@ from laboneq.compiler.common.compiler_settings import CompilerSettings from laboneq.compiler.common.event_type import EventType from laboneq.compiler.scheduler.section_schedule import SectionSchedule +from laboneq.data.compilation_job import ParameterInfo @define(kw_only=True, slots=True) @@ -17,7 +18,7 @@ class LoopIterationSchedule(SectionSchedule): """Schedule of a single iteration of a loop (sweep or average)""" iteration: int - sweep_parameters: List[Dict] + sweep_parameters: List[ParameterInfo] num_repeats: int shadow: bool @@ -62,9 +63,9 @@ def generate_event_list( event_type=EventType.PARAMETER_SET, time=start, section_name=self.section, - parameter={"id": param["id"]}, + parameter={"id": param.uid}, iteration=self.iteration, - value=param["values"][self.iteration], + value=param.values[self.iteration], ) for param in self.sweep_parameters ], diff --git a/laboneq/compiler/scheduler/loop_schedule.py b/laboneq/compiler/scheduler/loop_schedule.py index 8fc662a..845d451 100644 --- a/laboneq/compiler/scheduler/loop_schedule.py +++ b/laboneq/compiler/scheduler/loop_schedule.py @@ -14,12 +14,13 @@ from laboneq.compiler.scheduler.utils import ceil_to_grid, lcm from laboneq.core.exceptions.laboneq_exception import LabOneQException from laboneq.core.types.enums.repetition_mode import RepetitionMode +from laboneq.data.compilation_job import ParameterInfo @define(kw_only=True, slots=True) class LoopSchedule(SectionSchedule): compressed: bool - sweep_parameters: List[Dict] + sweep_parameters: List[ParameterInfo] iterations: int repetition_mode: Optional[RepetitionMode] repetition_time: Optional[int] @@ -196,7 +197,7 @@ def from_section_schedule( cls, schedule: SectionSchedule, compressed: bool, 
- sweep_parameters: List[Dict], + sweep_parameters: List[ParameterInfo], iterations: int, repetition_mode: Optional[RepetitionMode], repetition_time: Optional[int], diff --git a/laboneq/compiler/scheduler/match_schedule.py b/laboneq/compiler/scheduler/match_schedule.py index cabde4b..56bf29d 100644 --- a/laboneq/compiler/scheduler/match_schedule.py +++ b/laboneq/compiler/scheduler/match_schedule.py @@ -6,7 +6,7 @@ import logging import math from dataclasses import dataclass -from typing import TYPE_CHECKING, Dict, Iterable, Iterator, List, Tuple +from typing import TYPE_CHECKING, Dict, Iterable, Iterator, List, Optional, Tuple from attrs import define from zhinst.utils.feedback_model import ( @@ -243,8 +243,9 @@ def _compute_start_with_latency( @define(kw_only=True, slots=True) class MatchSchedule(SectionSchedule): - handle: str - local: bool + handle: Optional[str] + user_register: Optional[int] + local: Optional[bool] def __attrs_post_init__(self): super().__attrs_post_init__() @@ -253,22 +254,24 @@ def __attrs_post_init__(self): def _calculate_timing( self, schedule_data: ScheduleData, start: int, start_may_change ) -> int: - if start_may_change: - raise LabOneQException( - f"Match section '{self.section}' with handle '{self.handle}' may not be" - " a subsection of a right-aligned section or within a loop with" - " repetition mode AUTO." - ) + if self.handle is not None: + assert self.local is not None + if start_may_change: + raise LabOneQException( + f"Match section '{self.section}' with handle '{self.handle}' may not be" + " a subsection of a right-aligned section or within a loop with" + " repetition mode AUTO." 
+ ) - start = _compute_start_with_latency( - schedule_data, - start, - self.local, - self.handle, - self.section, - self.signals, - self.grid, - ) + start = _compute_start_with_latency( + schedule_data, + start, + self.local, + self.handle, + self.section, + self.signals, + self.grid, + ) for c in self.children: assert isinstance(c, CaseSchedule) @@ -297,6 +300,7 @@ def generate_event_list( section_start_event = events[0] assert section_start_event["event_type"] == EventType.SECTION_START section_start_event["handle"] = self.handle + section_start_event["user_register"] = self.user_register section_start_event["local"] = self.local return events diff --git a/laboneq/compiler/scheduler/parameter_store.py b/laboneq/compiler/scheduler/parameter_store.py index 6c37e3a..0c58d7d 100644 --- a/laboneq/compiler/scheduler/parameter_store.py +++ b/laboneq/compiler/scheduler/parameter_store.py @@ -8,8 +8,6 @@ from contextlib import contextmanager from typing import Dict, Set -from laboneq.core.exceptions import LabOneQException - class QueryTracker: """Tracks the queries made to the parameter store. @@ -77,13 +75,7 @@ def extend(self, other: Dict): This is useful for tracking the parameters used in a sub-block. 
""" - if self.keys() & other.keys() != set(): - reused_params = self.keys() & other.keys() - raise LabOneQException( - "Illegal nesting of multiple real-time sweeps over same parameter: {}".format( - ", ".join(reused_params) - ) - ) + assert self.keys().isdisjoint(other.keys()) self.update(other) yield for key in other: diff --git a/laboneq/compiler/scheduler/pulse_phase.py b/laboneq/compiler/scheduler/pulse_phase.py index d29c46a..b0d4431 100644 --- a/laboneq/compiler/scheduler/pulse_phase.py +++ b/laboneq/compiler/scheduler/pulse_phase.py @@ -59,11 +59,11 @@ def calculate_osc_phase(event_list, experiment_dao: ExperimentDAO): ]: incremented_phase = oscillator_phase_cumulative.get(signal_id, 0.0) - if oscillator_info.hardware: + if oscillator_info.is_hardware: if signal_id in oscillator_phase_sets: raise LabOneQException( f"There are set_oscillator_phase entries for signal " - f"'{signal_id}', but oscillator '{oscillator_info.id}' " + f"'{signal_id}', but oscillator '{oscillator_info.uid}' " f"is a hardware oscillator. Setting absolute phase is " f"not supported for hardware oscillators." 
) diff --git a/laboneq/compiler/scheduler/scheduler.py b/laboneq/compiler/scheduler/scheduler.py index 1e533ed..69f6fae 100644 --- a/laboneq/compiler/scheduler/scheduler.py +++ b/laboneq/compiler/scheduler/scheduler.py @@ -63,6 +63,7 @@ ) from laboneq.core.exceptions import LabOneQException from laboneq.core.types.enums import RepetitionMode, SectionAlignment +from laboneq.data.compilation_job import ParameterInfo if TYPE_CHECKING: from laboneq.compiler.common.signal_obj import SignalObj @@ -162,7 +163,7 @@ def _start_events(self): retval.append( { "event_type": EventType.INITIAL_RESET_HW_OSCILLATOR_PHASE, - "device_id": device_info.id, + "device_id": device_info.uid, "duration": device_type.reset_osc_duration, "time": 0, } @@ -228,17 +229,15 @@ def _schedule_section( section_info = self._experiment_dao.section_info(section_id) sweep_parameters = self._experiment_dao.section_parameters(section_id) for param in sweep_parameters: - if "values" not in param or param["values"] is None: - param["values"] = ( - param["start"] + np.arange(section_info.count) * param["step"] - ) + if param.values is None: + param.values = param.start + np.arange(section_info.count) * param.step is_loop = section_info.has_repeat if is_loop: schedule = self._schedule_loop( section_id, section_info, current_parameters, sweep_parameters ) - elif section_info.handle is not None: + elif section_info.handle is not None or section_info.user_register is not None: schedule = self._schedule_match( section_id, section_info, current_parameters ) @@ -271,17 +270,19 @@ def _swept_hw_oscillators( # Not every signal has an oscillator (e.g. 
flux lines), so check for None if oscillator is None: continue - param = oscillator.frequency_param - if param in sweep_parameters and oscillator.hardware: + if not isinstance(oscillator.frequency, ParameterInfo): + continue + param = oscillator.frequency + if param.uid in sweep_parameters and oscillator.is_hardware: if ( - param in oscillator_param_lookup - and oscillator_param_lookup[param].id != oscillator.id + param.uid in oscillator_param_lookup + and oscillator_param_lookup[param.uid].id != oscillator.uid ): raise LabOneQException( "Hardware frequency sweep may drive only a single oscillator" ) - oscillator_param_lookup[param] = SweptHardwareOscillator( - id=oscillator.id, device=signal_info.device_id, signal=signal + oscillator_param_lookup[param.uid] = SweptHardwareOscillator( + id=oscillator.uid, device=signal_info.device_id, signal=signal ) return oscillator_param_lookup @@ -291,7 +292,7 @@ def _schedule_loop( section_id, section_info: SectionInfo, current_parameters: ParameterStore[str, float], - sweep_parameters: List[Dict], + sweep_parameters: List[ParameterInfo], ) -> LoopSchedule: """Schedule the individual iterations of the loop ``section_id``. 
@@ -313,8 +314,8 @@ def _schedule_loop( children_schedules = [] for param in sweep_parameters: - if param["values"] is not None: - assert len(param["values"]) >= section_info.count + if param.values is not None: + assert len(param.values) >= section_info.count # todo: unroll loops that are too short if len(sweep_parameters) == 0: compressed = section_info.count > 1 @@ -332,7 +333,7 @@ def _schedule_loop( compressed = False signals = self._experiment_dao.section_signals_with_children(section_id) swept_hw_oscillators = self._swept_hw_oscillators( - {p["id"] for p in sweep_parameters}, signals + {p.uid for p in sweep_parameters}, signals ) if section_info.chunk_count > 1: @@ -348,10 +349,10 @@ def _schedule_loop( for local_iteration, global_iteration in enumerate(global_iterations): new_parameters = { - param["id"]: ( - param["values"][global_iteration] - if param["values"] is not None - else param["start"] + param["step"] * global_iteration + param.uid: ( + param.values[global_iteration] + if param.values is not None + else param.start + param.step * global_iteration ) for param in sweep_parameters } @@ -383,7 +384,7 @@ def _schedule_oscillator_frequency_step( self, swept_hw_oscillators: Dict[str, SweptHardwareOscillator], iteration: int, - sweep_parameters: List[Dict], + sweep_parameters: List[ParameterInfo], signals: Set[str], grid: int, section_id: str, @@ -399,12 +400,12 @@ def _schedule_oscillator_frequency_step( values = [] params = [] for param in sweep_parameters: - osc = swept_hw_oscillators.get(param["id"]) + osc = swept_hw_oscillators.get(param.uid) if osc is None: continue - values.append(param["values"][iteration]) + values.append(param.values[iteration]) swept_oscs_list.append(osc) - params.append(param["id"]) + params.append(param.uid) device_id = osc.device device_info = self._experiment_dao.device_info(device_id) device_type = DeviceType(device_info.device_type) @@ -493,7 +494,7 @@ def _schedule_loop_iteration( global_iteration: int, num_repeats: 
int, all_parameters: ParameterStore[str, float], - sweep_parameters: List[Dict], + sweep_parameters: List[ParameterInfo], swept_hw_oscillators: Dict[str, SweptHardwareOscillator], ) -> LoopIterationSchedule: """Schedule a single iteration of a loop. @@ -524,7 +525,7 @@ def _schedule_loop_iteration( section_id ): osc_info = self._experiment_dao.signal_oscillator(signal) - if osc_info is not None and osc_info.hardware: + if osc_info is not None and osc_info.is_hardware: hw_osc_reset_signals.add(signal) for _, osc in swept_hw_oscillators.items(): @@ -709,15 +710,15 @@ def resolve_pulse_params(params: Dict[str, Any]): osc = self._experiment_dao.signal_oscillator(pulse.signal_id) if osc is None: freq = None - elif not osc.hardware and osc.frequency_param is not None: + elif not osc.is_hardware and isinstance(osc.frequency, ParameterInfo): try: - freq = current_parameters[osc.frequency_param] + freq = current_parameters[osc.frequency.uid] except KeyError as e: raise LabOneQException( f"Playback of pulse '{pulse.pulse_id}' in section '{section} " f"requires the parameter '{osc.frequency_param}' to set the frequency." 
) from e - elif osc is None or osc.hardware: + elif osc is None or osc.is_hardware: freq = None else: freq = osc.frequency if osc is not None else None @@ -751,6 +752,11 @@ def _schedule_match( current_parameters: ParameterStore[str, float], ) -> MatchSchedule: + assert section_info.handle is not None or section_info.user_register is not None + handle: Optional[str] = section_info.handle + user_register: Optional[int] = section_info.user_register + local: Optional[bool] = section_info.local + dao = self._schedule_data.experiment_dao children_schedules = [] section_children = dao.direct_section_children(section_id) @@ -777,38 +783,40 @@ def _schedule_match( state=cs.state, ) - assert section_info.handle is not None - handle: str = section_info.handle - local: bool = section_info.local - try: - acquire_signal = dao.acquisition_signal(handle) - except KeyError as e: - raise LabOneQException(f"No acquisition with handle '{handle}'") from e - acquire_device = dao.device_from_signal(acquire_signal) - match_devices = {dao.device_from_signal(s) for s in signals} - - # todo: this is a brittle check for SHFQC - local_feedback_allowed = match_devices == {f"{acquire_device}_sg"} - - if local is None: - local = local_feedback_allowed - elif local and not local_feedback_allowed: - raise LabOneQException( - f"Local feedback not possible across devices {acquire_device} and {', '.join(match_devices)}" + if handle: + try: + acquire_signal = dao.acquisition_signal(handle) + except KeyError as e: + raise LabOneQException(f"No acquisition with handle '{handle}'") from e + acquire_device = dao.device_from_signal(acquire_signal) + match_devices = {dao.device_from_signal(s) for s in signals} + + # todo: this is a brittle check for SHFQC + local_feedback_allowed = match_devices == {f"{acquire_device}_sg"} + + if local is None: + local = local_feedback_allowed + elif local and not local_feedback_allowed: + raise LabOneQException( + f"Local feedback not possible across devices 
{acquire_device} and {', '.join(match_devices)}" + ) + + compressed_loop_grid = round( + ( + (8 if local else 200) + / self._sampling_rate_tracker.sampling_rate_for_device( + acquire_device + ) + / self._TINYSAMPLE + ) ) + else: + compressed_loop_grid = None play_after = section_info.play_after or [] if isinstance(play_after, str): play_after = [play_after] - compressed_loop_grid = round( - ( - (8 if local else 200) - / self._sampling_rate_tracker.sampling_rate_for_device(acquire_device) - / self._TINYSAMPLE - ) - ) - return MatchSchedule( grid=grid, length=to_tinysample(section_info.length, self._schedule_data.TINYSAMPLE), @@ -819,6 +827,7 @@ def _schedule_match( section=section_id, play_after=play_after, handle=handle, + user_register=user_register, local=local, compressed_loop_grid=compressed_loop_grid, ) @@ -838,7 +847,7 @@ def _schedule_case( section_info = self._schedule_data.experiment_dao.section_info(section_id) assert not section_info.has_repeat # case must not be a loop - assert section_info.handle is None + assert section_info.handle is None and section_info.user_register is None state = section_info.state assert state is not None @@ -931,10 +940,10 @@ def grid(self, *signal_ids: Iterable[str]) -> Tuple[int, int]: assert device is not None sample_rate = self._sampling_rate_tracker.sampling_rate_for_device( - device.id + device.uid ) sequencer_rate = self._sampling_rate_tracker.sequencer_rate_for_device( - device.id + device.uid ) signal_grid = int( diff --git a/laboneq/compiler/workflow/compiler.py b/laboneq/compiler/workflow/compiler.py index 213deec..1d16a95 100644 --- a/laboneq/compiler/workflow/compiler.py +++ b/laboneq/compiler/workflow/compiler.py @@ -26,7 +26,6 @@ ) from laboneq.compiler.common.signal_obj import SignalObj from laboneq.compiler.common.trigger_mode import TriggerMode -from laboneq.compiler.experiment_access.device_info import DeviceInfo from laboneq.compiler.experiment_access.experiment_dao import ExperimentDAO from 
laboneq.compiler.scheduler.sampling_rate_tracker import SamplingRateTracker from laboneq.compiler.scheduler.scheduler import Scheduler @@ -48,6 +47,7 @@ from laboneq.core.types.compiled_experiment import CompiledExperiment from laboneq.core.types.enums.acquisition_type import AcquisitionType, is_spectroscopy from laboneq.core.types.enums.mixer_type import MixerType +from laboneq.data.compilation_job import DeviceInfo, ParameterInfo from laboneq.data.scheduled_experiment import ScheduledExperiment from laboneq.executor.execution_from_experiment import ExecutionFactoryFromExperiment from laboneq.executor.executor import Statement @@ -135,7 +135,6 @@ def get_first_instr_of(device_infos: List[DeviceInfo], type) -> DeviceInfo: for signal_id in self._experiment_dao.signals() ] used_devices = set(info.device_type for info in signal_infos) - used_device_serials = set(info.device_serial for info in signal_infos) if ( "hdawg" in used_devices and "uhfqa" in used_devices @@ -146,12 +145,15 @@ def get_first_instr_of(device_infos: List[DeviceInfo], type) -> DeviceInfo: + "instruments are not supported" ) + standalone_qc = len(self._experiment_dao.devices()) <= 2 and all( + dev.is_qc for dev in device_infos + ) self._leader_properties.is_desktop_setup = not has_pqsc and ( used_devices == {"hdawg"} or used_devices == {"shfsg"} or used_devices == {"shfqa"} or used_devices == {"shfqa", "shfsg"} - and len(used_device_serials) == 1 # SHFQC + or standalone_qc or used_devices == {"hdawg", "uhfqa"} or (used_devices == {"uhfqa"} and has_hdawg) # No signal on leader ) @@ -169,15 +171,15 @@ def get_first_instr_of(device_infos: List[DeviceInfo], type) -> DeviceInfo: if self._leader_properties.is_desktop_setup: if leader is None: if has_hdawg: - leader = get_first_instr_of(device_infos, "hdawg").id + leader = get_first_instr_of(device_infos, "hdawg").uid elif has_shfqa: - leader = get_first_instr_of(device_infos, "shfqa").id + leader = get_first_instr_of(device_infos, "shfqa").uid if 
has_shfsg: # SHFQC self._leader_properties.internal_followers = [ - get_first_instr_of(device_infos, "shfsg").id + get_first_instr_of(device_infos, "shfsg").uid ] elif has_shfsg: - leader = get_first_instr_of(device_infos, "shfsg").id + leader = get_first_instr_of(device_infos, "shfsg").uid _logger.debug("Using desktop setup configuration with leader %s", leader) @@ -210,16 +212,16 @@ def get_first_instr_of(device_infos: List[DeviceInfo], type) -> DeviceInfo: if is_hdawg_solo: first_hdawg = get_first_instr_of(device_infos, "hdawg") if first_hdawg.reference_clock_source is None: - self._clock_settings[first_hdawg.id] = "internal" + self._clock_settings[first_hdawg.uid] = "internal" else: if not has_hdawg and has_shfsg: # SHFSG or SHFQC solo first_shfsg = get_first_instr_of(device_infos, "shfsg") if first_shfsg.reference_clock_source is None: - self._clock_settings[first_shfsg.id] = "internal" + self._clock_settings[first_shfsg.uid] = "internal" if not has_hdawg and has_shfqa: # SHFQA or SHFQC solo first_shfqa = get_first_instr_of(device_infos, "shfqa") if first_shfqa.reference_clock_source is None: - self._clock_settings[first_shfqa.id] = "internal" + self._clock_settings[first_shfqa.uid] = "internal" self._clock_settings["use_2GHz_for_HDAWG"] = has_shf self._leader_properties.global_leader = leader @@ -292,8 +294,8 @@ def _calc_osc_numbering(self): hw_osc_names = set() oscillator_info = self._experiment_dao.signal_oscillator(signal_id) - if oscillator_info is not None and oscillator_info.hardware: - hw_osc_names.add(oscillator_info.id) + if oscillator_info is not None and oscillator_info.is_hardware: + hw_osc_names.add(oscillator_info.uid) base_channel = min(signal_info.channels) min_osc_number = base_channel * 2 @@ -554,7 +556,7 @@ class DelayInfo: oscillator_info = self._experiment_dao.signal_oscillator(signal_id) if ( oscillator_info is not None - and not oscillator_info.hardware + and not oscillator_info.is_hardware and signal_info.modulation ): 
oscillator_frequency = oscillator_info.frequency @@ -568,8 +570,8 @@ class DelayInfo: self._shfqa_generator_allocation[signal_id]["channels"] ) hw_oscillator = ( - oscillator_info.id - if oscillator_info is not None and oscillator_info.hardware + oscillator_info.uid + if oscillator_info is not None and oscillator_info.is_hardware else None ) @@ -577,7 +579,7 @@ class DelayInfo: if ( device_type == DeviceType.UHFQA and oscillator_info - and oscillator_info.hardware + and oscillator_info.is_hardware ): mixer_type = MixerType.UHFQA_ENVELOPE elif signal_type in ("single",): @@ -629,10 +631,10 @@ def calc_outputs(self, signal_delays: SignalDelays): oscillator_info = self._experiment_dao.signal_oscillator(signal_id) oscillator_is_hardware = ( - oscillator_info is not None and oscillator_info.hardware + oscillator_info is not None and oscillator_info.is_hardware ) if oscillator_is_hardware: - osc_name = oscillator_info.id + osc_name = oscillator_info.uid oscillator_frequency = oscillator_info.frequency oscillator_number = self.osc_number(osc_name) @@ -686,17 +688,10 @@ def calc_outputs(self, signal_delays: SignalDelays): "amplitude": self._experiment_dao.amplitude(signal_id), } signal_is_modulated = signal_info.modulation - output_modulation_logic = { - (True, True): True, - (False, False): False, - (True, False): False, - (False, True): False, - } - if output_modulation_logic[ - (oscillator_is_hardware, signal_is_modulated) - ]: + + if oscillator_is_hardware and signal_is_modulated: output["modulation"] = True - if oscillator_frequency is None: + if isinstance(oscillator_frequency, ParameterInfo): oscillator_frequency = 0 else: output["modulation"] = False @@ -1061,7 +1056,7 @@ def compiler_output(self) -> CompiledExperiment: return CompiledExperiment( experiment_dict=ExperimentDAO.dump(self._experiment_dao), scheduled_experiment=ScheduledExperiment( - recipe=self._recipe, + recipe=self._recipe, # TODO(2K): Build 'Recipe' instead of dict 
src=self._combined_compiler_output.src, waves=list(self._combined_compiler_output.waves.values()), wave_indices=self._combined_compiler_output.wave_indices, diff --git a/laboneq/compiler/workflow/recipe_generator.py b/laboneq/compiler/workflow/recipe_generator.py index 231512b..ce24998 100644 --- a/laboneq/compiler/workflow/recipe_generator.py +++ b/laboneq/compiler/workflow/recipe_generator.py @@ -10,6 +10,7 @@ from laboneq.compiler.common.device_type import DeviceType from laboneq.compiler.experiment_access.experiment_dao import ExperimentDAO from laboneq.core.types.enums.acquisition_type import AcquisitionType +from laboneq.data.compilation_job import ParameterInfo if TYPE_CHECKING: from laboneq.compiler.workflow.compiler import LeaderProperties @@ -32,25 +33,25 @@ def __init__(self): self._recipe["experiment"] = {"realtime_execution_init": []} def add_oscillator_params(self, experiment_dao: ExperimentDAO): - hw_oscillators = {} - for oscillator in experiment_dao.hardware_oscillators(): - hw_oscillators[oscillator.id] = oscillator - oscillator_params = [] for signal_id in experiment_dao.signals(): signal_info = experiment_dao.signal_info(signal_id) oscillator_info = experiment_dao.signal_oscillator(signal_id) if oscillator_info is None: continue - if oscillator_info.hardware: - oscillator = hw_oscillators[oscillator_info.id] + if oscillator_info.is_hardware: + if isinstance(oscillator_info.frequency, ParameterInfo): + frequency, param = None, oscillator_info.frequency.uid + else: + frequency, param = oscillator_info.frequency, None + for ch in signal_info.channels: oscillator_param = { - "id": oscillator.id, - "device_id": oscillator.device_id, + "id": oscillator_info.uid, + "device_id": signal_info.device_id, "channel": ch, - "frequency": oscillator.frequency, - "param": oscillator.frequency_param, + "frequency": frequency, + "param": param, } oscillator_params.append(oscillator_param) @@ -100,9 +101,9 @@ def add_devices_from_experiment(self, experiment_dao: 
ExperimentDAO): initializations = [] for device in experiment_dao.device_infos(): devices.append( - {"device_uid": device.id, "driver": device.device_type.upper()} + {"device_uid": device.uid, "driver": device.device_type.upper()} ) - initializations.append({"device_uid": device.id, "config": {}}) + initializations.append({"device_uid": device.uid, "config": {}}) self._recipe["devices"] = devices self._recipe["experiment"]["initializations"] = initializations @@ -140,7 +141,7 @@ def add_connectivity_from_experiment( initialization["config"]["triggering_mode"] = "internal_follower" for device in experiment_dao.device_infos(): - device_uid = device.id + device_uid = device.uid initialization = self._find_initialization(device_uid) reference_clock = experiment_dao.device_reference_clock(device_uid) if reference_clock is not None: @@ -212,7 +213,9 @@ def add_output( if oscillator is not None: output["oscillator"] = oscillator output["modulation"] = modulation - if oscillator_frequency is not None: + if oscillator_frequency is not None and not isinstance( + oscillator_frequency, ParameterInfo + ): output["oscillator_frequency"] = oscillator_frequency if port_delay is not None: output["port_delay"] = port_delay diff --git a/laboneq/contrib/example_helpers/example_notebook_helper.py b/laboneq/contrib/example_helpers/example_notebook_helper.py index 3a8ffc2..ac315a9 100644 --- a/laboneq/contrib/example_helpers/example_notebook_helper.py +++ b/laboneq/contrib/example_helpers/example_notebook_helper.py @@ -4,6 +4,8 @@ """ Helper functions for definition of device setup and calibration settings """ +import numpy as np + from laboneq.contrib.example_helpers.descriptors.hdawg_uhfqa_pqsc import ( descriptor_hdawg_uhfqa_pqsc, ) @@ -14,6 +16,87 @@ from laboneq.core.types.enums import ModulationType from laboneq.dsl.calibration import Oscillator, SignalCalibration from laboneq.dsl.device import DeviceSetup +from laboneq.dsl.quantum.transmon import Transmon, TransmonParameters + + 
+# function to create a transmon qubit object from entries in a parameter dictionary +def create_transmon(index, base_parameters, device_setup): + q_name = "q" + str(index) + qubit = Transmon.from_logical_signal_group( + q_name, + lsg=device_setup.logical_signal_groups[q_name], + parameters=TransmonParameters( + resonance_frequency_ge=base_parameters["resonance_frequency_ge"][index], + resonance_frequency_ef=base_parameters["resonance_frequency_ge"][index], + drive_lo_frequency=base_parameters["drive_lo_frequency"][ + int(np.floor(index / 2)) + ], + readout_resonator_frequency=base_parameters["readout_resonator_frequency"][ + index + ], + readout_lo_frequency=base_parameters["readout_lo_frequency"][ + int(np.floor(index / 6)) + ], + readout_integration_delay=base_parameters["readout_integration_delay"], + drive_range=base_parameters["drive_range"], + readout_range_out=base_parameters["readout_range_out"], + readout_range_in=base_parameters["readout_range_in"], + user_defined={ + "amplitude_pi": base_parameters["amplitude_pi"], + "pulse_length": base_parameters["pulse_length"], + "readout_length": base_parameters["readout_length"], + "readout_amplitude": base_parameters["readout_amplitude"], + "reset_delay_length": base_parameters["reset_delay_length"], + }, + ), + ) + return qubit + + +# function to generate a set of base transmon qubit parameters, used as input to the create_transmon function +def generate_base_transmon_parameters( + number_of_qubits, + drive_centre_frequency=4e9, + readout_centre_frequency=6e9, +): + return { + "resonance_frequency_ge": [ + drive_centre_frequency + it * 100e6 + for _ in range(int(np.ceil(number_of_qubits / 6))) + for it in np.linspace(-2.5, 2.5, 6) + ], + "resonance_frequency_ef": [ + drive_centre_frequency - 250e6 + it * 100e6 + for _ in range(int(np.ceil(number_of_qubits / 6))) + for it in np.linspace(-2.5, 2.5, 6) + ], + "drive_lo_frequency": [ + drive_centre_frequency for _ in range(int(np.ceil(number_of_qubits / 2))) + ], + 
# readout resonatro frequency + "readout_resonator_frequency": [ + readout_centre_frequency + it * 100e6 + for _ in range(int(np.ceil(number_of_qubits / 6))) + for it in np.linspace(-2.5, 2.5, 6) + ], + # readout LO Frequency + "readout_lo_frequency": [ + readout_centre_frequency for _ in range(int(np.ceil(number_of_qubits / 6))) + ], + # collection of pulse parameters + "readout_length": 2e-6, + "readout_amplitude": 0.6, + "readout_integration_delay": 40e-9, + "pulse_length_spectroscopy": 2e-6, + "amplitude_pi": 0.66, + "pulse_length": 100e-9, + # range settings + "readout_range_out": 5, + "readout_range_in": 10, + "drive_range": 5, + # delay inserted after every readout + "reset_delay_length": 100e-9, + } # functions that modifies the calibration on a given device setup @@ -21,7 +104,7 @@ def calibrate_devices(device_setup): local_oscillator_shfsg = Oscillator(uid="lo_shfsg", frequency=5e9) local_oscillator_shfqa = Oscillator(uid="lo_shfqa", frequency=5.5e9) - ## qubit 0 + # qubit 0 # calibration setting for drive line for qubit 0 device_setup.logical_signal_groups["q0"].logical_signals[ "drive_line" @@ -65,7 +148,7 @@ def calibrate_devices(device_setup): delay_signal=0, # inserted in sequencer code, bound to waveform granularity local_oscillator=local_oscillator_shfqa, # will be ignored if the instrument is not an SHF* ) - ## qubit 1 + # qubit 1 # calibration setting for drive line for qubit 1 device_setup.logical_signal_groups["q1"].logical_signals[ "drive_line" diff --git a/laboneq/contrib/example_helpers/feedback_helper.py b/laboneq/contrib/example_helpers/feedback_helper.py index 46c116f..8210a33 100755 --- a/laboneq/contrib/example_helpers/feedback_helper.py +++ b/laboneq/contrib/example_helpers/feedback_helper.py @@ -8,8 +8,18 @@ from laboneq.core.types.enums import AcquisitionType, AveragingMode from laboneq.dsl.calibration import SignalCalibration -from laboneq.dsl.experiment import Experiment, pulse_library -from laboneq.dsl.parameter import 
LinearSweepParameter +from laboneq.dsl.experiment import pulse_library +from laboneq.dsl.experiment.builtins import ( + acquire, + acquire_loop_rt, + delay, + experiment, + experiment_calibration, + for_each, + map_signal, + play, + section, +) ## create readout pulse waveform with IQ encoded phase and optional software modulation @@ -24,81 +34,70 @@ def complex_freq_phase( return amplitude * np.exp(1j * 2 * np.pi * freq * time_axis + 1j * phase) -## Experiment for raw signal acquisition from measure pulse +@experiment(signals=["measure", "acquire"]) def exp_raw(measure_pulse, q0, pulse_len): - exp = Experiment(signals=["measure", "acquire"]) - exp.map_signal("measure", q0["measure_line"]) - exp.map_signal("acquire", q0["acquire_line"]) + """Experiment for raw signal acquisition from measure pulse""" + map_signal("measure", q0["measure_line"]) + map_signal("acquire", q0["acquire_line"]) - with exp.acquire_loop_rt(count=1024, acquisition_type=AcquisitionType.RAW): - exp.play(signal="measure", pulse=measure_pulse) - exp.acquire(signal="acquire", handle="raw", length=pulse_len) - exp.delay(signal="measure", time=10e-6) + with acquire_loop_rt(count=1024, acquisition_type=AcquisitionType.RAW): + play(signal="measure", pulse=measure_pulse) + acquire(signal="acquire", handle="raw", length=pulse_len) + delay(signal="measure", time=10e-6) - return exp - -## Experiment that plays two different measure pulses one after another and acquire results in single shot integration mode -## use custom integration kernel for data acquisition +@experiment(signals=["measure0", "measure1", "acquire"]) def exp_integration(measure0, measure1, q0, q1, samples_kernel, rotation_angle=0): + """Experiment that plays two different measure pulses one after another and acquire + results in single shot integration mode use custom integration kernel for data + acquisition. 
+ """ kernel = pulse_library.sampled_pulse_complex( samples_kernel * np.exp(1j * rotation_angle) ) - exp = Experiment(signals=["measure0", "measure1", "acquire"]) - exp.map_signal("measure0", q0["measure_line"]) - exp.map_signal("measure1", q1["measure_line"]) - exp.map_signal("acquire", q0["acquire_line"]) + map_signal("measure0", q0["measure_line"]) + map_signal("measure1", q1["measure_line"]) + map_signal("acquire", q0["acquire_line"]) - with exp.acquire_loop_rt( + with acquire_loop_rt( count=1024, averaging_mode=AveragingMode.SINGLE_SHOT, acquisition_type=AcquisitionType.INTEGRATION, ): - with exp.section(): - exp.play(signal="measure0", pulse=measure0) - exp.acquire(signal="acquire", handle="data0", kernel=kernel) - exp.delay(signal="measure0", time=10e-6) - with exp.section(): - exp.play(signal="measure1", pulse=measure1) - exp.acquire(signal="acquire", handle="data1", kernel=kernel) - exp.delay(signal="measure1", time=10e-6) - return exp - - -## Experiment to test state discrimination by playing two different measure pulses one ofter the other and acquiring the state readout + with section(): + play(signal="measure0", pulse=measure0) + acquire(signal="acquire", handle="data0", kernel=kernel) + delay(signal="measure0", time=10e-6) + with section(): + play(signal="measure1", pulse=measure1) + acquire(signal="acquire", handle="data1", kernel=kernel) + delay(signal="measure1", time=10e-6) + + +@experiment(signals=["measure0", "measure1", "acquire"]) def exp_discrimination( measure0, measure1, q0, q1, samples_kernel, threshold=0, rotation_angle=0, num=50 ): + """Experiment to test state discrimination by playing two different measure pulses + one ofter the other and acquiring the state readout. 
+ """ kernel = pulse_library.sampled_pulse_complex( samples_kernel * np.exp(1j * rotation_angle) ) - exp = Experiment(signals=["measure0", "measure1", "acquire"]) - exp.map_signal("measure0", q0["measure_line"]) - exp.map_signal("measure1", q1["measure_line"]) - exp.map_signal("acquire", q0["acquire_line"]) - - exp.signals["acquire"].calibration = SignalCalibration(threshold=threshold) - - repeat = LinearSweepParameter(start=0, stop=num - 1, count=num) - - with exp.acquire_loop_rt( - count=1024, acquisition_type=AcquisitionType.DISCRIMINATION - ): - with exp.sweep(parameter=repeat): - with exp.section(): - exp.play(signal="measure0", pulse=measure0) - exp.acquire(signal="acquire", handle="data0", kernel=kernel) - # with exp.match_local(handle="data0"): - # with exp.case(0): - # exp.play() - # with exp.case(1): - # pass - exp.delay(signal="measure0", time=1e-6) - with exp.section(): - exp.play(signal="measure1", pulse=measure1) - exp.acquire(signal="acquire", handle="data1", kernel=kernel) - exp.delay(signal="measure1", time=1e-6) - - return exp + map_signal("measure0", q0["measure_line"]) + map_signal("measure1", q1["measure_line"]) + map_signal("acquire", q0["acquire_line"]) + experiment_calibration()["acquire"] = SignalCalibration(threshold=threshold) + + with acquire_loop_rt(count=1024, acquisition_type=AcquisitionType.DISCRIMINATION): + with for_each(range(num)): + with section(): + play(signal="measure0", pulse=measure0) + acquire(signal="acquire", handle="data0", kernel=kernel) + delay(signal="measure0", time=1e-6) + with section(): + play(signal="measure1", pulse=measure1) + acquire(signal="acquire", handle="data1", kernel=kernel) + delay(signal="measure1", time=1e-6) diff --git a/laboneq/contrib/example_helpers/generate_descriptor.py b/laboneq/contrib/example_helpers/generate_descriptor.py new file mode 100644 index 0000000..335fba7 --- /dev/null +++ b/laboneq/contrib/example_helpers/generate_descriptor.py @@ -0,0 +1,1079 @@ +# Copyright 2020 Zurich 
Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +"""Function to generate a descriptor from a list of instruments, making basic assumptions about the type of signals needed +""" + +import time +from pathlib import Path + +import yaml +from zhinst.toolkit.driver.devices import PQSC +from zhinst.toolkit.session import Session + + +def generate_descriptor( + pqsc: list = None, # ["DEV10XX0"] + hdawg_4: list = None, # ["DEV8XX0", "DEV8XX1"], + hdawg_8: list = None, + uhfqa: list = None, + shfsg_4: list = None, # ["DEV12XX0", "DEV12XX1"], + shfsg_8: list = None, + shfqc_2: list = None, + shfqc_4: list = None, + shfqc_6: list = None, # ["DEV12XX2"], + shfqa_2: list = None, + shfqa_4: list = None, # ["DEV12XX3"], + number_data_qubits=2, + number_flux_lines=0, + multiplex=False, + number_multiplex=0, + drive_only=False, + readout_only=False, + internal_clock=False, + save=False, + filename="yaml_descriptor", + get_zsync=False, + get_dio=False, + ip_address: str = "localhost", +): + """A function to generate a descriptor given a list of devices based on wiring assumptions. + + With this function, you can generate a descriptor file quickly. This descriptor will produce + a string or (optionally) a YAML file using the instruments provided, logical signal groups + derived from the number of specified qubits, and logical signal lines based off of the options + chosen by the user, e.g., whether to multiplex the readout, how many qubits to multiplex per + readout line, etc. The generated descriptor therefore specifies to the user how their qubits + should be wired to the Zurich Instruments devices. If the user prefers to specify the wiring + of each qubit individually, this should be done by editing the YAML string manually. + + Args: + pqsc: The device id of your PQSC as a list (e.g. `["DEV10XX0"]`). + Note: only one PQSC is possible per set-up. + hdawg_4: The device id(s) of your 4-channel HDAWG instruments as a list + (e.g. `["DEV8XX0", "DEV8XX1"]`). 
+ hdawg_8: The device id(s) of your 8-channel HDAWG instruments as a list + (e.g. `["DEV8XX2", "DEV8XX3", "DEV8XX4"]`). + uhfqa: The device id(s) of your UHFQA instruments as a list + (e.g. `["DEV2XX0", "DEV2XX1"]`). + Note: The UHFQA cannot be used combined with SHF devices. + shfsg_4: The device id(s) of your 4-channel SHFSG instruments as a list + (e.g. `["DEV12XX0"]`). + shfsg_8: The device id(s) of your 8-channel SHFSG instruments as a list + (e.g. `["DEV12XX1", "DEV12XX2"]`). + shfqc_2: The device id(s) of your 2 SG-channel SHFQC instruments as a list + (e.g. `["DEV12XX3"]`). + shfqc_4: The device id(s) of your 4 SG-channel SHFQC instruments as a list + (e.g. `["DEV12XX4"]`). + shfqc_6: The device id(s) of your 6 SG-channel SHFQC instruments as a list + (e.g. `["DEV12XX5", "DEV12XX6", "DEV12XX7"]`). + shfqa_2: The device id(s) of your 2-channel SHFQA instruments as a list + (e.g. `["DEV12XX8"]`). + shfqa_4: The device id(s) of your 4-channel SHFQA instruments as a list + (e.g. `["DEV12XX9"]`). + number_data_qubits: The number of qubits to create logical signal groups for. + number_flux_lines: The total number of flux lines, using the assumption that there + is no more than one flux line per qubit. + multiplex: If True, qubits will be multiplexed according to number_multiplex. + number_multiplex: The number of qubits to multiplex per physical quantum analyzer channel. + drive_only: If True, generates a descriptor without measure or acquisition lines. + readout_only: If True, generates a descriptor without drive or flux lines. + internal_clock: If True, uses the internal PQSC clock. + Note: the PQSC internal clock can not be used combined with the UHFQA. + save: If True, creates a Descriptor file in the active directory and + saves a YAML file with the name specified in filename. + filename: The file name to give to the YAML descriptor (e.g. `"yaml_descriptor"`). 
+ get_zsync: If True, starts a Session to communicate with the PQSC and + listed devices to determine the connections of the ZSync cables. + get_dio: If True, starts a Session to determine the connections of HDAWG + to UHFQA instruments via DIO cables. + ip_address: The IP address needed to connect to the instruments if using + get_zsync or get_dio. + + Returns: + A string in YAML format and, optionally, a YAML file. + """ + + # Make combined lists and dicts + pqsc_list = [] + hd_list = [] + uhf_list = [] + sg_list = [] + qc_list = [] + qa_list = [] + all_list = [] + devid_uid = {} + + if pqsc is not None: + for i in pqsc: + pqsc_list.append(i) + if hdawg_4 is not None: + for i in hdawg_4: + hd_list.append(i) + all_list.append(i) + devid_uid[i] = f"HDAWG_{i}" + if hdawg_8 is not None: + for i in hdawg_8: + hd_list.append(i) + all_list.append(i) + devid_uid[i] = f"HDAWG_{i}" + if uhfqa is not None: + for i in uhfqa: + uhf_list.append(i) + all_list.append(i) + devid_uid[i] = f"UHFQA_{i}" + if shfsg_4 is not None: + for i in shfsg_4: + sg_list.append(i) + all_list.append(i) + devid_uid[i] = f"SHFSG_{i}" + if shfsg_8 is not None: + for i in shfsg_8: + sg_list.append(i) + all_list.append(i) + devid_uid[i] = f"SHFSG_{i}" + if shfqc_2 is not None: + for i in shfqc_2: + qc_list.append(i) + all_list.append(i) + devid_uid[i] = f"SHFQC_{i}" + if shfqc_4 is not None: + for i in shfqc_4: + qc_list.append(i) + all_list.append(i) + devid_uid[i] = f"SHFQC_{i}" + if shfqc_6 is not None: + for i in shfqc_6: + qc_list.append(i) + all_list.append(i) + devid_uid[i] = f"SHFQC_{i}" + if shfqa_2 is not None: + for i in shfqa_2: + qa_list.append(i) + all_list.append(i) + devid_uid[i] = f"SHFQA_{i}" + if shfqa_4 is not None: + for i in shfqa_4: + qa_list.append(i) + all_list.append(i) + devid_uid[i] = f"SHFQA_{i}" + + # Get numbers of instruments from user lists + number_hdawg_4 = len(hdawg_4) if hdawg_4 is not None else 0 + number_hdawg_8 = len(hdawg_8) if hdawg_8 is not None else 0 + + 
number_uhfqa = len(uhfqa) if uhfqa is not None else 0 + + number_shfsg_4 = len(shfsg_4) if shfsg_4 is not None else 0 + number_shfsg_8 = len(shfsg_8) if shfsg_8 is not None else 0 + + number_shfqc_2 = len(shfqc_2) if shfqc_2 is not None else 0 + number_shfqc_4 = len(shfqc_4) if shfqc_4 is not None else 0 + number_shfqc_6 = len(shfqc_6) if shfqc_6 is not None else 0 + + number_shfqa_2 = len(shfqa_2) if shfqa_2 is not None else 0 + number_shfqa_4 = len(shfqa_4) if shfqa_4 is not None else 0 + + # Specify instrument channels + n_qa_uhfqa = 1 + n_qa_shfqa_2 = 2 + n_qa_shfqa_4 = 4 + n_qa_shfqc_6 = 1 + n_qa_shfqc_4 = 1 + n_qa_shfqc_2 = 1 + + n_ch_hdawg_4 = 4 + n_ch_hdawg_8 = 8 + + n_iq_shfsg_4 = 4 + n_iq_shfsg_8 = 8 + n_iq_shfqc_2 = 2 + n_iq_shfqc_4 = 4 + n_iq_shfqc_6 = 6 + + if uhfqa is None: + number_qa_lines = ( + n_qa_shfqa_2 * number_shfqa_2 + + n_qa_shfqa_4 * number_shfqa_4 + + n_qa_shfqc_6 * number_shfqc_6 + + n_qa_shfqc_4 * number_shfqc_4 + + n_qa_shfqc_2 * number_shfqc_2 + ) + elif uhfqa is not None: + number_qa_lines = n_qa_uhfqa * number_uhfqa + + # Check if enough output lines are present + number_shf_output_lines = ( + n_iq_shfqc_2 * number_shfqc_2 + + n_iq_shfqc_4 * number_shfqc_4 + + n_iq_shfqc_6 * number_shfqc_6 + + n_iq_shfsg_4 * number_shfsg_4 + + n_iq_shfsg_8 * number_shfsg_8 + ) + + number_hd_lines = n_ch_hdawg_4 * number_hdawg_4 + n_ch_hdawg_8 * number_hdawg_8 + + # Check remainong control lines + after_shf_iq = number_shf_output_lines - number_data_qubits + # print(f"After SHF IQ: {after_shf_iq}") + + # Check remaining flux lines + after_hd_flux = number_hd_lines - number_flux_lines + # print(f"After HD flux: {after_hd_flux}") + + if after_shf_iq < 0: + leftover_output = int(after_hd_flux / 2) + after_shf_iq + elif after_shf_iq > 0 and after_hd_flux < 0: + leftover_output = after_shf_iq + after_hd_flux + elif after_hd_flux == 0: + leftover_output = after_shf_iq + elif after_shf_iq == 0: + leftover_output = after_hd_flux + elif after_shf_iq > 0 and 
after_hd_flux > 0: + leftover_output = after_shf_iq + after_hd_flux + + # Check if enough analyzer channels are present + if multiplex is False: + tot_qa_lines = number_qa_lines + elif multiplex is True: + tot_qa_lines = number_multiplex * number_qa_lines + + # Compatibility check + if len(all_list) > 1 and not pqsc: + print( + """\ +A PQSC is required to synchronize multiple instruments. +If you are using only a single HDAWG and UHFQA, please see +https://docs.zhinst.com/labone_q_user_manual/concepts/set_up_equipment.html +for how to set them up without a PQSC. + """ + ) + return + if len(all_list) > 18: + print( + "The PQSC only supports up to 18 instruments. Please reduce number of instruments." + ) + return + elif pqsc is not None and len(pqsc) > 1: + print("Cannot have more than one PQSC in a descriptor!") + return + elif len(all_list) != len(set(all_list)): + print("Duplicate Device IDs! Please check your device lists.") + return + elif leftover_output < 0 and readout_only is False: + print("Not enough output lines for number of qubits specified!") + return + elif uhfqa is not None and number_multiplex >= 11: + print("Can't multiplex more than 10 qubits on a UHFQA!") + return + elif uhfqa is not None and internal_clock is True: + print("PQSC internal clock not supported with a UHFQA!") + return + elif number_multiplex >= 17: + print("Can't multiplex more than 16 qubits on a SHFQA or QC analyzer channel!") + return + elif tot_qa_lines < number_data_qubits and drive_only is False: + print("Not enough available readout lines!") + return + elif ( + uhfqa is not None + and (shfqa_2 or shfqa_4 or shfqc_2 or shfqc_4 or shfqc_6 or shfsg_4 or shfsg_8) + is not None + ): + print("UHFQA not supported in combination with SHF Instruments.") + return + elif ( + get_dio is True + and (shfqa_2 or shfqa_4 or shfqc_2 or shfqc_4 or shfqc_6 or shfsg_4 or shfsg_8) + is not None + ): + print("Get DIO not supported with SHF Instruments.") + return + + # Create instrument dictionary 
+ def generate_instrument_list(instrument, instrument_name): + instrument_list = [] + for entry in instrument: + instrument_list.append( + {"address": entry, "uid": f"{instrument_name}_{entry}"} + ) + return instrument_list + + instrument_dict = { + "PQSC": generate_instrument_list(pqsc_list, "PQSC") if pqsc else None, + "HDAWG": generate_instrument_list(hd_list, "HDAWG") if hd_list else None, + "UHFQA": generate_instrument_list(uhf_list, "UHFQA") if uhf_list else None, + "SHFSG": generate_instrument_list(sg_list, "SHFSG") if sg_list else None, + "SHFQC": generate_instrument_list(qc_list, "SHFQC") if qc_list else None, + "SHFQA": generate_instrument_list(qa_list, "SHFQA") if qa_list else None, + } + + clean_instruments_dict = { + "instruments": {k: v for k, v in instrument_dict.items() if v is not None} + } + + # Assign logical signals and ports + signal_and_port_dict = {} + # IQ Line Outputs + current_qubit = 0 + if readout_only is False: + if shfqc_6 is not None: + i_shfqc_6, i_qc_ch_6 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_6[i_shfqc_6]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line", + "ports": f"SGCHANNELS/{i_qc_ch_6}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_ef", + "ports": f"SGCHANNELS/{i_qc_ch_6}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_cr", + "ports": f"SGCHANNELS/{i_qc_ch_6}/OUTPUT", + } + ) + i_qc_ch_6 += 1 + current_qubit += 1 + if i_qc_ch_6 >= n_iq_shfqc_6: + i_qc_ch_6 = 0 + i_shfqc_6 += 1 + if i_shfqc_6 == len(shfqc_6): + break + if shfqc_4 is not None: + i_shfqc_4, i_qc_ch_4 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_4[i_shfqc_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line", + "ports": f"SGCHANNELS/{i_qc_ch_4}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_ef", + 
"ports": f"SGCHANNELS/{i_qc_ch_4}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_cr", + "ports": f"SGCHANNELS/{i_qc_ch_4}/OUTPUT", + } + ) + i_qc_ch_4 += 1 + current_qubit += 1 + if i_qc_ch_4 >= n_iq_shfqc_4: + i_qc_ch_4 = 0 + i_shfqc_4 += 1 + if i_shfqc_4 == len(shfqc_4): + break + if shfqc_2 is not None: + i_shfqc_2, i_qc_ch_2 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_2[i_shfqc_2]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line", + "ports": f"SGCHANNELS/{i_qc_ch_2}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_ef", + "ports": f"SGCHANNELS/{i_qc_ch_2}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_cr", + "ports": f"SGCHANNELS/{i_qc_ch_2}/OUTPUT", + } + ) + i_qc_ch_2 += 1 + current_qubit += 1 + if i_qc_ch_2 >= n_iq_shfqc_2: + i_qc_ch_2 = 0 + i_shfqc_2 += 1 + if i_shfqc_2 == len(shfqc_2): + break + if shfsg_8 is not None: + i_shfsg_8, i_sg_ch_8 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFSG_{shfsg_8[i_shfsg_8]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line", + "ports": f"SGCHANNELS/{i_sg_ch_8}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_ef", + "ports": f"SGCHANNELS/{i_sg_ch_8}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_cr", + "ports": f"SGCHANNELS/{i_sg_ch_8}/OUTPUT", + } + ) + i_sg_ch_8 += 1 + current_qubit += 1 + if i_sg_ch_8 >= n_iq_shfsg_8: + i_sg_ch_8 = 0 + i_shfsg_8 += 1 + if i_shfsg_8 == len(shfsg_8): + break + if shfsg_4 is not None: + i_shfsg_4, i_sg_ch_4 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFSG_{shfsg_4[i_shfsg_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line", + "ports": f"SGCHANNELS/{i_sg_ch_4}/OUTPUT", + } + ) + sig_dict.append( + { + 
"iq_signal": f"q{i}/drive_line_ef", + "ports": f"SGCHANNELS/{i_sg_ch_4}/OUTPUT", + } + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line_cr", + "ports": f"SGCHANNELS/{i_sg_ch_4}/OUTPUT", + } + ) + i_sg_ch_4 += 1 + current_qubit += 1 + if i_sg_ch_4 >= n_iq_shfsg_4: + i_sg_ch_4 = 0 + i_shfsg_4 += 1 + if i_shfsg_4 == len(shfsg_4): + break + if hdawg_8 is not None: + i_hdawg_8, i_hd_ch_8 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"HDAWG_{hdawg_8[i_hdawg_8]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line", + "ports": f"[SIGOUTS/{i_hd_ch_8}, SIGOUTS/{i_hd_ch_8+1}]", + } + ) + i_hd_ch_8 += 2 + current_qubit += 1 + if i_hd_ch_8 >= n_ch_hdawg_8: + i_hd_ch_8 = 0 + i_hdawg_8 += 1 + if i_hdawg_8 == len(hdawg_8): + break + if hdawg_4 is not None: + i_hdawg_4, i_hd_ch_4 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"HDAWG_{hdawg_4[i_hdawg_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/drive_line", + "ports": f"[SIGOUTS/{i_hd_ch_4}, SIGOUTS/{i_hd_ch_4+1}]", + } + ) + i_hd_ch_4 += 2 + current_qubit += 1 + if i_hd_ch_4 >= n_ch_hdawg_4: + i_hd_ch_4 = 0 + i_hdawg_4 += 1 + if i_hdawg_4 == len(hdawg_4): + break + + # Flux Lines + current_qubit = 0 + if readout_only is False: + if hdawg_8 is not None: + for i in range(current_qubit, number_flux_lines): + sig_dict = signal_and_port_dict.setdefault( + f"HDAWG_{hdawg_8[i_hdawg_8]}", [] + ) + sig_dict.append( + { + "rf_signal": f"q{i}/flux_line", + "ports": f"SIGOUTS/{i_hd_ch_8}", + } + ) + i_hd_ch_8 += 1 + current_qubit += 1 + if i_hd_ch_8 >= n_ch_hdawg_8: + i_hd_ch_8 = 0 + i_hdawg_8 += 1 + if i_hdawg_8 == len(hdawg_8): + break + if hdawg_4 is not None: + for i in range(current_qubit, number_flux_lines): + sig_dict = signal_and_port_dict.setdefault( + f"HDAWG_{hdawg_4[i_hdawg_4]}", [] + ) + sig_dict.append( + { + "rf_signal": f"q{i}/flux_line", + "ports": 
f"SIGOUTS/{i_hd_ch_4}", + } + ) + i_hd_ch_4 += 1 + current_qubit += 1 + if i_hd_ch_4 >= n_ch_hdawg_4: + i_hd_ch_4 = 0 + i_hdawg_4 += 1 + if i_hdawg_4 == len(hdawg_4): + break + if shfsg_8 is not None: + for i in range(current_qubit, number_flux_lines): + sig_dict = signal_and_port_dict.setdefault( + f"SHFSG_{shfsg_8[i_shfsg_8]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/flux_line", + "ports": f"SGCHANNELS/{i_sg_ch_8}/OUTPUT", + } + ) + i_sg_ch_8 += 1 + current_qubit += 1 + if i_sg_ch_8 >= n_iq_shfsg_8: + i_sg_ch_8 = 0 + i_shfsg_8 += 1 + if i_shfsg_8 == len(shfsg_8): + break + if shfsg_4 is not None: + for i in range(current_qubit, number_flux_lines): + sig_dict = signal_and_port_dict.setdefault( + f"SHFSG_{shfsg_4[i_shfsg_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/flux_line", + "ports": f"SGCHANNELS/{i_sg_ch_4}/OUTPUT", + } + ) + i_sg_ch_4 += 1 + current_qubit += 1 + if i_sg_ch_4 >= n_iq_shfsg_4: + i_sg_ch_4 = 0 + i_shfsg_4 += 1 + if i_shfsg_4 == len(shfsg_4): + break + if shfqc_6 is not None: + for i in range(current_qubit, number_flux_lines): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_6[i_shfqc_6]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/flux_line", + "ports": f"SGCHANNELS/{i_qc_ch_6}/OUTPUT", + } + ) + i_qc_ch_6 += 1 + current_qubit += 1 + if i_qc_ch_6 >= n_iq_shfqc_6: + i_qc_ch_6 = 0 + i_shfqc_6 += 1 + if i_shfqc_6 == len(shfqc_6): + break + if shfqc_4 is not None: + for i in range(current_qubit, number_flux_lines): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_4[i_shfqc_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/flux_line", + "ports": f"SGCHANNELS/{i_qc_ch_4}/OUTPUT", + } + ) + i_qc_ch_4 += 1 + current_qubit += 1 + if i_qc_ch_4 >= n_iq_shfqc_4: + i_qc_ch_4 = 0 + i_shfqc_4 += 1 + if i_shfqc_4 == len(shfqc_4): + break + if shfqc_2 is not None: + for i in range(current_qubit, number_flux_lines): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_2[i_shfqc_2]}", 
[] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/flux_line", + "ports": f"SGCHANNELS/{i_qc_ch_2}/OUTPUT", + } + ) + i_qc_ch_2 += 1 + current_qubit += 1 + if i_qc_ch_2 >= n_iq_shfqc_2: + i_qc_ch_2 = 0 + i_shfqc_2 += 1 + if i_shfqc_2 == len(shfqc_2): + break + + # QA Lines + current_qubit = 0 + # Without multiplexed readout + if not multiplex and not drive_only: + if shfqc_6 is not None: + i_shfqc_6, i_qc_qa_6 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_6[i_shfqc_6]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qc_qa_6}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qc_qa_6}/INPUT", + } + ) + i_qc_qa_6 += 1 + current_qubit += 1 + if i_qc_qa_6 >= n_qa_shfqc_6: + i_qc_qa_6 = 0 + i_shfqc_6 += 1 + if i_shfqc_6 == len(shfqc_6): + break + if shfqc_4 is not None: + i_shfqc_4, i_qc_qa_4 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_4[i_shfqc_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qc_qa_4}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qc_qa_4}/INPUT", + } + ) + i_qc_qa_4 += 1 + current_qubit += 1 + if i_qc_qa_4 >= n_qa_shfqc_4: + i_qc_qa_4 = 0 + i_shfqc_4 += 1 + if i_shfqc_4 == len(shfqc_4): + break + if shfqc_2 is not None: + i_shfqc_2, i_qc_qa_2 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_2[i_shfqc_2]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qc_qa_2}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qc_qa_2}/INPUT", + } + ) + i_qc_qa_2 += 1 + current_qubit += 1 + if i_qc_qa_2 >= n_qa_shfqc_2: + 
i_qc_qa_2 = 0 + i_shfqc_2 += 1 + if i_shfqc_2 == len(shfqc_2): + break + if shfqa_4 is not None: + i_shfqa_4, i_qa_ch_4 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQA_{shfqa_4[i_shfqa_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qa_ch_4}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qa_ch_4}/INPUT", + } + ) + i_qa_ch_4 += 1 + current_qubit += 1 + if i_qa_ch_4 >= n_qa_shfqa_4: + i_qa_ch_4 = 0 + i_shfqa_4 += 1 + if i_shfqa_4 == len(shfqa_4): + break + if shfqa_2 is not None: + i_shfqa_2, i_qa_ch_2 = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQA_{shfqa_2[i_shfqa_2]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qa_ch_2}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qa_ch_2}/INPUT", + } + ) + i_qa_ch_2 += 1 + current_qubit += 1 + if i_qa_ch_2 >= n_qa_shfqa_2: + i_qa_ch_2 = 0 + i_shfqa_2 += 1 + if i_shfqa_2 == len(shfqa_2): + break + if uhfqa is not None: + i_uhfqa, i_uhfqa_ch = 0, 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"UHFQA_{uhfqa[i_uhfqa]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"[SIGOUTS/{i_uhfqa_ch}, SIGOUTS/{i_uhfqa_ch+1}]", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"[SIGINS/{i_uhfqa_ch},SIGINS/{i_uhfqa_ch+1}]", + } + ) + i_uhfqa_ch += 2 + current_qubit += 1 + if i_uhfqa_ch >= n_qa_uhfqa: + i_uhfqa_ch = 0 + i_uhfqa += 1 + if i_uhfqa == len(uhfqa): + break + # With multiplexed readout + if multiplex and not drive_only: + if shfqc_6 is not None: + i_shfqc_6, i_qc_qa_6 = 0, 0 + multiplex_number = 0 + for i in range(current_qubit, number_data_qubits): + 
sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_6[i_shfqc_6]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qc_qa_6}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qc_qa_6}/INPUT", + } + ) + current_qubit += 1 + multiplex_number += 1 + if multiplex_number >= number_multiplex: + multiplex_number = 0 + i_qc_qa_6 += 1 + if i_qc_qa_6 >= n_qa_shfqc_6: + i_qc_qa_6 = 0 + i_shfqc_6 += 1 + if i_shfqc_6 == len(shfqc_6): + break + if shfqc_4 is not None: + i_shfqc_4, i_qc_qa_4 = 0, 0 + multiplex_number = 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_4[i_shfqc_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qc_qa_4}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qc_qa_4}/INPUT", + } + ) + current_qubit += 1 + multiplex_number += 1 + if multiplex_number >= number_multiplex: + multiplex_number = 0 + i_qc_qa_4 += 1 + if i_qc_qa_4 >= n_qa_shfqc_4: + i_qc_qa_4 = 0 + i_shfqc_4 += 1 + if i_shfqc_4 == len(shfqc_4): + break + if shfqc_2 is not None: + i_shfqc_2, i_qc_qa_2 = 0, 0 + multiplex_number = 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQC_{shfqc_2[i_shfqc_2]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qc_qa_2}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qc_qa_2}/INPUT", + } + ) + current_qubit += 1 + multiplex_number += 1 + if multiplex_number >= number_multiplex: + multiplex_number = 0 + i_qc_qa_2 += 1 + if i_qc_qa_2 >= n_qa_shfqc_2: + i_qc_qa_2 = 0 + i_shfqc_2 += 1 + if i_shfqc_2 == len(shfqc_2): + break + if shfqa_4 is not None: + i_shfqa_4, i_qa_ch_4 = 0, 0 + multiplex_number = 0 + for i in 
range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQA_{shfqa_4[i_shfqa_4]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qa_ch_4}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qa_ch_4}/INPUT", + } + ) + current_qubit += 1 + multiplex_number += 1 + if multiplex_number >= number_multiplex: + multiplex_number = 0 + i_qa_ch_4 += 1 + if i_qa_ch_4 >= n_qa_shfqa_4: + i_qa_ch_4 = 0 + i_shfqa_4 += 1 + if i_shfqa_4 == len(shfqa_4): + break + if shfqa_2 is not None: + i_shfqa_2, i_qa_ch_2 = 0, 0 + multiplex_number = 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"SHFQA_{shfqa_2[i_shfqa_2]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"QACHANNELS/{i_qa_ch_2}/OUTPUT", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"QACHANNELS/{i_qa_ch_2}/INPUT", + } + ) + current_qubit += 1 + multiplex_number += 1 + if multiplex_number >= number_multiplex: + multiplex_number = 0 + i_qa_ch_2 += 1 + if i_qa_ch_2 >= n_qa_shfqa_2: + i_qa_ch_2 = 0 + i_shfqa_2 += 1 + if i_shfqa_2 == len(shfqa_2): + break + if uhfqa is not None: + i_uhfqa, i_uhfqa_ch = 0, 0 + multiplex_number = 0 + for i in range(current_qubit, number_data_qubits): + sig_dict = signal_and_port_dict.setdefault( + f"UHFQA_{uhfqa[i_uhfqa]}", [] + ) + sig_dict.append( + { + "iq_signal": f"q{i}/measure_line", + "ports": f"[SIGOUTS/{i_uhfqa_ch}, SIGOUTS/{i_uhfqa_ch+1}]", + } + ) + sig_dict.append( + { + "acquire_signal": f"q{i}/acquire_line", + "ports": f"[SIGINS/{i_uhfqa_ch},SIGINS/{i_uhfqa_ch+1}]", + } + ) + current_qubit += 1 + multiplex_number += 1 + if multiplex_number >= number_multiplex: + multiplex_number = 0 + i_uhfqa_ch += 2 + if i_uhfqa_ch >= n_qa_uhfqa: + i_uhfqa_ch = 0 + i_uhfqa += 1 + if i_uhfqa == len(uhfqa): + break + + # PQSC connections + 
if pqsc is not None and not get_zsync: + for i, k in enumerate(devid_uid): + sig_dict = signal_and_port_dict.setdefault(f"PQSC_{pqsc[0]}", []) + if devid_uid[k].split("_")[0] == "UHFQA": + continue + sig_dict.append( + { + "to": f"{devid_uid[k]}", + "port": f"ZSYNCS/{i}", + } + ) + if internal_clock is True: + sig_dict.append("internal_clock_signal") + + if get_zsync or get_dio: + session = Session(ip_address) + if pqsc is not None and get_zsync: + device_pqsc = session.connect_device(pqsc[0]) + print("Checking PQSC Connections...") + for i, k in enumerate(devid_uid): + session_device = session.connect_device(devid_uid[k].split("_")[1]) + if "SHF" in session_device.device_type: + print(devid_uid[k].split("_")[1]) + session_device.system.clocks.referenceclock.in_.source(2) + if "HDAWG" in session_device.device_type: + print(devid_uid[k].split("_")[1]) + session_device.system.clocks.referenceclock.source(2) + if "UHFQA" in session_device.device_type: + continue + time.sleep(2) + sig_dict = signal_and_port_dict.setdefault(f"PQSC_{pqsc[0]}", []) + sig_dict.append( + { + "to": f"{devid_uid[k]}", + "port": f"ZSYNCS/{PQSC.find_zsync_worker_port(self=device_pqsc, device=session_device)}", + } + ) + if internal_clock is True: + sig_dict.append("internal_clock_signal") + + # HD and UHFQA DIOS + if get_dio: + time.sleep(2) + if hdawg_8 is not None and uhfqa is not None: + for hd in hdawg_8: + sig_dict = signal_and_port_dict.setdefault(f"HDAWG_{hd}", []) + device_hd = session.connect_device(hd) + device_hd.dios[0].output(int(hd.split("V")[1])) + device_hd.dios[0].drive(15) + for uhf in uhfqa: + device_uhfqa = session.connect_device(uhf) + time.sleep(4) + codeword = device_uhfqa.dios[0].input()["dio"][0] + if codeword == int(hd.split("V")[1]): + print(f"{hd} connected to {uhf} via DIO") + sig_dict.append( + { + "to": f"UHFQA_{uhf}", + "port": "DIOS/0", + } + ) + device_hd.dios[0].drive(0) + if hdawg_4 is not None and uhfqa is not None: + for hd in hdawg_4: + sig_dict = 
signal_and_port_dict.setdefault(f"HDAWG_{hd}", [])
+                device_hd = session.connect_device(hd)
+                device_hd.dios[0].output(int(hd.split("V")[1]))
+                device_hd.dios[0].drive(15)
+                for uhf in uhfqa:
+                    device_uhfqa = session.connect_device(uhf)
+                    time.sleep(4)
+                    codeword = device_uhfqa.dios[0].input()["dio"][0]
+                    if codeword == int(hd.split("V")[1]):
+                        print(f"{hd} connected to {uhf} via DIO")
+                        sig_dict.append(
+                            {
+                                "to": f"UHFQA_{uhf}",
+                                "port": "DIOS/0",
+                            }
+                        )
+                device_hd.dios[0].drive(0)
+        with session.set_transaction():
+            session.disconnect_device(pqsc[0])
+            for device in all_list:
+                session.disconnect_device(device)
+
+    clean_connections_dict = {
+        "connections": {k: v for k, v in signal_and_port_dict.items() if v is not None}
+    }
+
+    # Generate final dictionary and YAML
+    yaml_dict = {}
+
+    yaml_dict.update(clean_connections_dict)
+    yaml_dict.update(clean_instruments_dict)
+
+    yaml_final = yaml.safe_dump(yaml_dict)
+
+    if save is True:
+        Path("Descriptors").mkdir(parents=True, exist_ok=True)
+        with open(f"./Descriptors/{filename}.yaml", "w") as file:
+            yaml.safe_dump(yaml_dict, file)
+
+    return yaml_final
diff --git a/laboneq/contrib/setup_installation_helpers/README.md b/laboneq/contrib/setup_installation_helpers/README.md
new file mode 100644
index 0000000..c0d8d1a
--- /dev/null
+++ b/laboneq/contrib/setup_installation_helpers/README.md
@@ -0,0 +1,111 @@
+![LabOne Q logo](https://github.com/zhinst/laboneq/raw/main/docs/images/Logo_LabOneQ.png)
+
+# QCCS setup installation helpers
+
+This folder contains some useful Python scripts to help with the installation of a QCCS research setup.
+
+## Requirements
+
+This software comes with the LabOne Q installation. Please refer to the [LabOne Q installation guide](https://docs.zhinst.com/labone_q_user_manual/getting_started/installation.html) for more information.
+
+## `cable_checker.py`
+
+This script provides a function to check the connectivity of the QCCS setup to external scopes or internal acquisition units using a loopback cable. The intended use cases are research and test systems where the QCCS setup is not necessarily connected to a quantum processor.
+
+The script will play an individual pulse sequence on each output channel of the QCCS setup. In a future version it will also record the signals on all input channels. The user can then check the recorded signals to verify that the setup and its connections are working as expected.
+
+Supported device types are:
+
+HDAWG (=HDAWG8), HDAWG4, HDAWG8, SHFQA (=SHFQA4), SHFQA2, SHFQA4, SHFSG (=SHFSG8), SHFSG4, SHFSG8, SHFQC.
+
+UHFQA devices are not yet supported.
+
+### Usage
+
+The script can be used as follows:
+
+```python
+from laboneq.contrib.setup_installation_helpers.cable_checker import check_cable_experiment, Device
+
+devices = {
+    "DEV10001": Device(type="PQSC"),
+    "DEV8004": Device(type="HDAWG8", zsync_port=1),
+    "DEV8015": Device(type="HDAWG4", zsync_port=2),
+    "DEV12012": Device(type="SHFQC", zsync_port=3),
+}
+experiment, device_setup = check_cable_experiment(
+    devices=devices,
+    server_host="11.22.33.44",
+    server_port=8004,
+    play_parallel=False,
+    play_initial_trigger=False,
+)
+
+session = Session(device_setup)
+session.connect()
+session.run(experiment)
+```
+
+The script will output a list of all devices and the pulse patterns played on their output channels. The patterns consist of a start and stop marker, then 4 bits for the device and 4 bits for the output port. A bit is modelled by a square pulse with a length of 64 ns where the amplitude depends on the bit's state. The pulses can be played in parallel or sequentially. Optionally, an initial pulse can be played on all channels to trigger an oscilloscope.
+
+The script will print a list of all pulse patterns:
+
+```
+DEV10001
+DEV8047
+ - Port: SIGOUTS/0 (...1...1) device_HDAWG8_DEV8047_0_1
+ - Port: SIGOUTS/1 (...1..1.) device_HDAWG8_DEV8047_0_2
+ - Port: SIGOUTS/2 (...1..11) device_HDAWG8_DEV8047_0_3
+ - Port: SIGOUTS/3 (...1.1..) device_HDAWG8_DEV8047_0_4
+ - Port: SIGOUTS/4 (...1.1.1) device_HDAWG8_DEV8047_0_5
+ - Port: SIGOUTS/5 (...1.11.) device_HDAWG8_DEV8047_0_6
+ - Port: SIGOUTS/6 (...1.111) device_HDAWG8_DEV8047_0_7
+ - Port: SIGOUTS/7 (...11...) device_HDAWG8_DEV8047_0_8
+DEV8015
+ - Port: SIGOUTS/0 (..1....1) device_HDAWG4_DEV8015_0_1
+ - Port: SIGOUTS/1 (..1...1.) device_HDAWG4_DEV8015_0_2
+ - Port: SIGOUTS/2 (..1...11) device_HDAWG4_DEV8015_0_3
+ - Port: SIGOUTS/3 (..1..1..) device_HDAWG4_DEV8015_0_4
+DEV12012
+ - Port: QACHANNELS/0/OUTPUT (..11...1) device_SHFQC_DEV12012_0_1
+ - Port: SGCHANNELS/0/OUTPUT (..11...1) device_SHFQC_DEV12012_1_1
+ - Port: SGCHANNELS/1/OUTPUT (..11..1.) device_SHFQC_DEV12012_1_2
+ - Port: SGCHANNELS/2/OUTPUT (..11..11) device_SHFQC_DEV12012_1_3
+ - Port: SGCHANNELS/3/OUTPUT (..11.1..) device_SHFQC_DEV12012_1_4
+ - Port: SGCHANNELS/4/OUTPUT (..11.1.1) device_SHFQC_DEV12012_1_5
+ - Port: SGCHANNELS/5/OUTPUT (..11.11.) device_SHFQC_DEV12012_1_6
+```
+
+### Calibration
+
+It will not set any calibration values - be sure to set the correct output ranges, port modes, lo frequencies, etc. in the device setup before running the experiment. All signals belong to the logical signal group "q". The signal names are determined as follows:
+
+`device_TTTTT_DDDDDDD_X_Y`
+
+where `TTTTT` is the device type (for example `HDAWG8`), `DDDDDDD` is the device serial number (for example `DEV8001`), `X` is the subdevice (for the QA (0) and SG (1) part of the SHFQC) number and `Y` is the port number.
+
+For example, `device_SHFQC_DEV12012_1_6` is the signal name for the rightmost SG port of the SHFQC with serial number `DEV12012`.
+ +An example code snippet to set the output ranges and port modes for the SHFQC is shown below: + +```python +from laboneq.simple import SignalCalibration, Oscillator, PortMode + +lsg = device_setup.logical_signal_groups["q"].logical_signals +for id, signal in lsg.items(): + if "HDAWG" in id: + signal.calibration = SignalCalibration(range=3) + elif "SHFSG" in id: + signal.calibration = SignalCalibration( + range=5, + local_oscillator=Oscillator(frequency=0), + port_mode=PortMode.LF, + oscillator=Oscillator(frequency=0, modulation_type=ModulationType.HARDWARE), + ) + elif "SHFQA" in id: + signal.calibration = SignalCalibration( + range=5, + local_oscillator=Oscillator(frequency=1e9), + ) + +``` diff --git a/laboneq/contrib/setup_installation_helpers/__init__.py b/laboneq/contrib/setup_installation_helpers/__init__.py new file mode 100644 index 0000000..0a7bff2 --- /dev/null +++ b/laboneq/contrib/setup_installation_helpers/__init__.py @@ -0,0 +1,2 @@ +# Copyright 2020 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 diff --git a/laboneq/contrib/setup_installation_helpers/cable_checker.py b/laboneq/contrib/setup_installation_helpers/cable_checker.py new file mode 100644 index 0000000..a4e5ea4 --- /dev/null +++ b/laboneq/contrib/setup_installation_helpers/cable_checker.py @@ -0,0 +1,446 @@ +# Copyright 2019 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + + +from __future__ import annotations + +from dataclasses import dataclass +from enum import Enum + +import numpy as np + +from laboneq.dsl.device.device_setup import DeviceSetup +from laboneq.dsl.experiment import pulse_library +from laboneq.dsl.experiment.experiment import Experiment + + +class SignalType(Enum): + """:meta private:""" + + RF = "rf_signal" + IQ = "iq_signal" + + +@dataclass +class DeviceProperties: + """:meta private:""" + + number_of_sg_ports: int | list[int] + number_of_channels_per_sg_port: int + number_of_qa_ports: int + number_of_channels_per_qa_port: int + 
sg_signal_type: SignalType + channel_per_pulse: bool | list[bool] + sg_prefix: str | list[str] + sg_suffix: str + qa_prefix: str + qa_suffix: str + + +@dataclass +class Signal: + """:meta private:""" + + base_signal_id: str + signal_ids: dict[str, str] + device_id: str + signal_number: int + port: str + type: SignalType + + def get_for(self, pulse): + return self.signal_ids.get(pulse, self.signal_ids.get("", None)) + + +@dataclass +class Device: + """Device to consider for cable checking.""" + + type: str + zsync_port: int | None = None + + +DEVICE_PROPERTIES = { + "PQSC": DeviceProperties( + number_of_sg_ports=0, + number_of_channels_per_sg_port=0, + number_of_qa_ports=0, + number_of_channels_per_qa_port=0, + sg_signal_type=SignalType.IQ, + channel_per_pulse=False, + sg_prefix="", + sg_suffix="", + qa_prefix="", + qa_suffix="", + ), + "HDAWG": DeviceProperties( + number_of_sg_ports=8, + number_of_channels_per_sg_port=1, + number_of_qa_ports=0, + number_of_channels_per_qa_port=0, + sg_signal_type=SignalType.RF, + channel_per_pulse=False, + sg_prefix="SIGOUTS/", + sg_suffix="", + qa_prefix="", + qa_suffix="", + ), + "HDAWG4": DeviceProperties( + number_of_sg_ports=4, + number_of_channels_per_sg_port=1, + number_of_qa_ports=0, + number_of_channels_per_qa_port=0, + sg_signal_type=SignalType.RF, + channel_per_pulse=False, + sg_prefix="SIGOUTS/", + sg_suffix="", + qa_prefix="", + qa_suffix="", + ), + "HDAWG8": DeviceProperties( + number_of_sg_ports=8, + number_of_channels_per_sg_port=1, + number_of_qa_ports=0, + number_of_channels_per_qa_port=0, + sg_signal_type=SignalType.RF, + channel_per_pulse=False, + sg_prefix="SIGOUTS/", + sg_suffix="", + qa_prefix="", + qa_suffix="", + ), + "SHFQA": DeviceProperties( + number_of_sg_ports=4, + number_of_channels_per_sg_port=1, + number_of_qa_ports=4, + number_of_channels_per_qa_port=1, + sg_signal_type=SignalType.IQ, + channel_per_pulse=True, + sg_prefix="QACHANNELS/", + sg_suffix="/OUTPUT", + qa_prefix="QACHANNELS/", + 
qa_suffix="/INPUT", + ), + "SHFQA2": DeviceProperties( + number_of_sg_ports=2, + number_of_channels_per_sg_port=1, + number_of_qa_ports=2, + number_of_channels_per_qa_port=1, + sg_signal_type=SignalType.IQ, + channel_per_pulse=True, + sg_prefix="QACHANNELS/", + sg_suffix="/OUTPUT", + qa_prefix="QACHANNELS/", + qa_suffix="/INPUT", + ), + "SHFQA4": DeviceProperties( + number_of_sg_ports=4, + number_of_channels_per_sg_port=1, + number_of_qa_ports=4, + number_of_channels_per_qa_port=1, + sg_signal_type=SignalType.IQ, + channel_per_pulse=True, + sg_prefix="QACHANNELS/", + sg_suffix="/OUTPUT", + qa_prefix="QACHANNELS/", + qa_suffix="/INPUT", + ), + "SHFSG": DeviceProperties( + number_of_sg_ports=8, + number_of_channels_per_sg_port=1, + number_of_qa_ports=0, + number_of_channels_per_qa_port=0, + sg_signal_type=SignalType.IQ, + channel_per_pulse=False, + sg_prefix="SGCHANNELS/", + sg_suffix="/OUTPUT", + qa_prefix="", + qa_suffix="", + ), + "SHFSG4": DeviceProperties( + number_of_sg_ports=4, + number_of_channels_per_sg_port=1, + number_of_qa_ports=0, + number_of_channels_per_qa_port=0, + sg_signal_type=SignalType.IQ, + channel_per_pulse=False, + sg_prefix="SGCHANNELS/", + sg_suffix="/OUTPUT", + qa_prefix="", + qa_suffix="", + ), + "SHFSG8": DeviceProperties( + number_of_sg_ports=8, + number_of_channels_per_sg_port=1, + number_of_qa_ports=0, + number_of_channels_per_qa_port=0, + sg_signal_type=SignalType.IQ, + channel_per_pulse=False, + sg_prefix="SGCHANNELS/", + sg_suffix="/OUTPUT", + qa_prefix="", + qa_suffix="", + ), + # "UHFQA": DeviceProperties(2, 2, 2, 2, SignalType.IQ, "SIGOUTS/", "", "", ""), + # QA ports come first! 
+ "SHFQC": DeviceProperties( + number_of_sg_ports=[1, 6], + number_of_channels_per_sg_port=1, + number_of_qa_ports=1, + number_of_channels_per_qa_port=1, + sg_signal_type=SignalType.IQ, + channel_per_pulse=[True, False], + sg_prefix=["QACHANNELS/", "SGCHANNELS/"], + sg_suffix="/OUTPUT", + qa_prefix="QACHANNELS/", + qa_suffix="/INPUT", + ), +} + + +def create_device_setup( + devices: dict[str, Device], server_host: str, server_port: int | str +) -> tuple[DeviceSetup, list[Signal], list[Signal]]: + """:meta private:""" + dataservers: dict[str, dict[str, str | list[str]]] = { + "zi_server": {"host": server_host, "port": int(server_port), "instruments": []} + } + instruments: dict[str, list[dict[str, str]]] = {} + connections: dict[str, list[dict[str, str | list[str]]]] = {} + sg_signals: list[Signal] = [] + qa_signals: list[Signal] = [] + + for uid, device in devices.items(): + device_type = device.type.upper() + name = f"device_{device_type}_{uid}" + base_type = device_type[:5].upper() + instruments.setdefault(base_type, []).append({"address": uid, "uid": name}) + dataservers["zi_server"]["instruments"].append(name) # type: ignore + conn = [] + devprops = DEVICE_PROPERTIES[device_type] + + n_sg_ports = devprops.number_of_sg_ports + if not isinstance(n_sg_ports, list): + n_sg_ports = [n_sg_ports] + sg_prefix = [devprops.sg_prefix] + channel_per_pulse = [devprops.channel_per_pulse] + else: + sg_prefix = devprops.sg_prefix # type: ignore + channel_per_pulse = devprops.channel_per_pulse # type: ignore + + for k, (nprt, sgprf, cps) in enumerate( + zip(n_sg_ports, sg_prefix, channel_per_pulse) + ): + j = 1 + for i in range(nprt): + assert ( + devprops.number_of_channels_per_sg_port == 1 + ), "So far, only one channel per port is supported" + ls_name = f"{name}_{k}_{j}" + port = f"{sgprf}{i}{devprops.sg_suffix}" + pulse_list = ["init", "start", "end", "1", "."] if cps else [""] + conn += [ + { + devprops.sg_signal_type.value: f"q/{ls_name}{pulse}", + "port": port, + } + for 
pulse in pulse_list + ] + sg_signals.append( + Signal( + base_signal_id=ls_name, + signal_ids={pulse: f"{ls_name}{pulse}" for pulse in pulse_list}, + device_id=uid, + signal_number=j, + type=devprops.sg_signal_type, + port=port, + ) + ) + j += devprops.number_of_channels_per_sg_port + for i in range(devprops.number_of_qa_ports): + j = i + 1 + ls_name = f"{name}_0_{j}_acq" + port = f"{devprops.qa_prefix}{i}{devprops.qa_suffix}" + conn.append( + { + "acquire_signal": f"q/{ls_name}", + "port": port, + } + ) + qa_signals.append( + Signal( + base_signal_id=ls_name, + signal_ids={"": ls_name}, + device_id=uid, + signal_number=j, + type=SignalType.IQ, + port=port, + ) + ) + connections[name] = conn + if "PQSC" in instruments: + pqsc_name = instruments["PQSC"][0]["uid"] + for uid, device in devices.items(): + if device.zsync_port is not None: + device_type = device.type.upper() + name = f"device_{device_type}_{uid}" + connections[pqsc_name].append( + {"to": name, "port": f"ZSYNCS/{device.zsync_port}"} + ) + + device_setup = DeviceSetup.from_dicts( + instruments=instruments, connections=connections, dataservers=dataservers + ) + return device_setup, sg_signals, qa_signals + + +def get_matching_acquire(signal_id, qa_signals): + """:meta private:""" + return next( + (qas for qas in qa_signals if qas.base_signal_id == signal_id + "_acq"), None + ) + + +def check_cable_experiment( + devices, + server_host: str, + server_port: str | int, + play_parallel: bool = True, + play_initial_trigger: bool = False, + bit_pulse_length: float = 256e-9, + bit_gap: float = 128e-9, +) -> tuple[Experiment, DeviceSetup]: + """ + Create an experiment to check the cables of the devices in the setup. + + Args: + devices (dict[str, Device]): A dictionary of devices to check. + server_host (str): The server host to connect to. + server_port (int): The server port to connect to. + play_parallel (bool, optional): Whether to play the pulses in parallel. 
+ play_initial_trigger (bool, optional): Whether to play a pulse on each + output as initial trigger. + + Returns: + Tuple[Experiment, DeviceSetup]: The experiment and the device setup to + be used with a Session object. + + Usage: + + .. code-block :: python + + experiment, device_setup = check_cables( + devices=devices, + server_host="11.22.33.44", + server_port=8004, + play_parallel=False, + play_initial_trigger=False, + ) + session=Session(device_setup) + session.connect() + session.run(experiment) + """ + device_setup, sg_signals, qa_signals = create_device_setup( + devices, server_host=server_host, server_port=server_port + ) + init_pulse = pulse_library.const(length=bit_pulse_length) + start_pulse = pulse_library.const(length=bit_pulse_length, amplitude=0.8) + end_pulse = pulse_library.const(length=bit_pulse_length, amplitude=0.9) + on_pulse = pulse_library.const(length=bit_pulse_length, amplitude=0.5) + off_pulse = pulse_library.const(length=bit_pulse_length, amplitude=0.2) + acq_pulse = pulse_library.const(length=bit_pulse_length) + lsg = device_setup.logical_signal_groups["q"].logical_signals + sg_all_signals = {s: lsg[s] for sig in sg_signals for s in sig.signal_ids.values()} + qa_all_signals = {s: lsg[s] for sig in qa_signals for s in sig.signal_ids.values()} + signal_map = {**sg_all_signals, **qa_all_signals} + exp = Experiment(signals=list(signal_map.keys())) + exp.set_signal_map(signal_map) + block_signal = sg_signals[0].get_for("start") + with exp.acquire_loop_rt(1): + if play_initial_trigger: + with exp.section(uid="initial_trigger"): + for s in sg_signals: + exp.play( + signal=s.get_for("init"), phase=np.pi / 4, pulse=init_pulse + ) + if qa_sig := get_matching_acquire(s.base_signal_id, qa_signals): + exp.acquire( + signal=qa_sig.signal_ids[""], + kernel=acq_pulse, + handle=qa_sig.signal_ids[""], + ) + exp.delay(signal=s.get_for("init"), time=2 * bit_gap) + + for dev_nr, dev in enumerate(devices.keys()): + print(dev) + bin_rep_dev = 
np.binary_repr(dev_nr, width=4).replace("0", ".") + my_signals = sorted( + [s for s in sg_signals if s.device_id == dev], + key=lambda s: s.signal_number, + ) + for s in my_signals: + bin_rep_sig = np.binary_repr(s.signal_number, width=4).replace("0", ".") + pattern = bin_rep_dev + bin_rep_sig + print(f" - Port: {s.port} ({pattern}) {s.base_signal_id}") + with exp.section(uid=s.base_signal_id): + if qa_sig := get_matching_acquire(s.base_signal_id, qa_signals): + # The SHFQA is much more restricted in the way measurement pulses can be played. + # We also need at least one acquire to set the acquisition mode to readout. + # So let's just measure everything, which can be used later to check the arriving + # signal pattern. + qa_sig_name = qa_sig.signal_ids[""] + with exp.section(): + with exp.section(): + if not play_parallel: + exp.reserve(block_signal) + exp.measure( + measure_signal=s.get_for("start"), + measure_pulse=start_pulse, + acquire_signal=qa_sig_name, + integration_kernel=acq_pulse, + handle=qa_sig_name, + ) + exp.delay(signal=s.get_for("start"), time=bit_gap) + for p in pattern: + with exp.section(): + if not play_parallel: + exp.reserve(block_signal) + exp.measure( + measure_signal=s.get_for(p), + measure_pulse=on_pulse + if p == "1" + else off_pulse, + acquire_signal=qa_sig_name, + integration_kernel=acq_pulse, + handle=qa_sig_name + "_pattern", + ) + exp.delay(signal=s.get_for(p), time=bit_gap) + with exp.section(): + if not play_parallel: + exp.reserve(block_signal) + exp.measure( + measure_signal=s.get_for("end"), + measure_pulse=end_pulse, + acquire_signal=qa_sig_name, + integration_kernel=acq_pulse, + handle=qa_sig_name, + ) + exp.delay(signal=s.get_for("end"), time=128e-9) + else: + signal_name = s.get_for("") + play_args = { + "signal": signal_name, + "phase": np.pi / 4, + } + if not play_parallel: + exp.reserve(block_signal) + exp.play(**play_args, pulse=start_pulse) + exp.delay(signal=signal_name, time=bit_gap) + for p in pattern: + exp.play( + 
**play_args, pulse=on_pulse if p == "1" else off_pulse + ) + exp.delay(signal=signal_name, time=bit_gap) + exp.play(**play_args, pulse=end_pulse) + exp.delay(signal=signal_name, time=128e-9) + return exp, device_setup diff --git a/laboneq/controller/communication.py b/laboneq/controller/communication.py index 547dc2d..755887a 100644 --- a/laboneq/controller/communication.py +++ b/laboneq/controller/communication.py @@ -191,7 +191,7 @@ def execute(self): class ServerQualifier: dry_run: bool = True host: str = None - port: int = None + port: int = 8004 api_level: int = None ignore_version_mismatch: bool = False diff --git a/laboneq/controller/controller.py b/laboneq/controller/controller.py index a341e43..a4a8709 100644 --- a/laboneq/controller/controller.py +++ b/laboneq/controller/controller.py @@ -4,6 +4,7 @@ from __future__ import annotations import concurrent.futures +import copy import itertools import logging import os @@ -30,32 +31,31 @@ from laboneq.controller.devices.device_zi import Waveforms from laboneq.controller.devices.zi_node_monitor import ResponseWaiter from laboneq.controller.near_time_runner import NearTimeRunner -from laboneq.controller.recipe_1_4_0 import * # noqa: F401, F403 from laboneq.controller.recipe_processor import ( RecipeData, RtExecutionInfo, pre_process_compiled, ) -from laboneq.controller.results import ( - build_partial_result, - make_acquired_result, - make_empty_results, -) +from laboneq.controller.results import build_partial_result, make_acquired_result from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType from laboneq.core.types.enums.averaging_mode import AveragingMode from laboneq.core.utilities.replace_pulse import ReplacementType, calc_wave_replacements from laboneq.data.execution_payload import TargetSetup +from laboneq.data.experiment_results import ExperimentResults +from laboneq.data.recipe import NtStepKey from 
laboneq.executor.execution_from_experiment import ExecutionFactoryFromExperiment from laboneq.executor.executor import Statement +from laboneq.implementation.payload_builder.convert_from_legacy_json_recipe import ( + convert_from_legacy_json_recipe, +) if TYPE_CHECKING: from laboneq.controller.devices.device_zi import DeviceZI from laboneq.core.types import CompiledExperiment from laboneq.data.execution_payload import ExecutionPayload - from laboneq.dsl import Session from laboneq.dsl.experiment.pulse import Pulse - from laboneq.dsl.result.results import Results + from laboneq.dsl.session import Session _logger = logging.getLogger(__name__) @@ -120,8 +120,8 @@ def __init__( self._user_functions: dict[str, Callable] = user_functions self._nodes_from_user_functions: list[DaqNodeAction] = [] self._recipe_data: RecipeData = None - self._session = None - self._results: Results = None + self._session: Any = None + self._results = ExperimentResults() _logger.debug("Controller created") _logger.debug("Controller debug logging is on") @@ -130,7 +130,7 @@ def __init__( def _allocate_resources(self): self._devices.free_allocations() - osc_params = self._recipe_data.recipe.experiment.oscillator_params + osc_params = self._recipe_data.recipe.oscillator_params for osc_param in sorted(osc_params, key=lambda p: p.id): self._devices.find_by_uid(osc_param.device_id).allocate_osc(osc_param) @@ -185,7 +185,7 @@ def _upload_awg_programs(self, nt_step: NtStepKey): rt_exec_step = next( ( r - for r in recipe_data.recipe.experiment.realtime_execution_init + for r in recipe_data.recipe.realtime_execution_init if r.device_id == initialization.device_uid and r.awg_id == awg_obj.awg and r.nt_step == nt_step @@ -398,7 +398,7 @@ def _execute_one_step_leaders(self): batch_set(nodes_to_execute) def _wait_execution_to_stop(self, acquisition_type: AcquisitionType): - min_wait_time = self._recipe_data.recipe.experiment.max_step_execution_time + min_wait_time = 
self._recipe_data.recipe.max_step_execution_time if min_wait_time is None: _logger.warning( "No estimation available for the execution time, assuming 10 sec." @@ -482,17 +482,22 @@ def execute_compiled_legacy( compiled_experiment.experiment ) + scheduled_experiment = copy.copy(compiled_experiment.scheduled_experiment) + if isinstance(scheduled_experiment.recipe, dict): + scheduled_experiment.recipe = convert_from_legacy_json_recipe( + scheduled_experiment.recipe + ) + self._recipe_data = pre_process_compiled( - compiled_experiment.scheduled_experiment, self._devices, execution + scheduled_experiment, self._devices, execution ) self._session = session - if session is None: - self._results = None - else: - self._results = session._last_results - self._execute_compiled_impl() + if session and session._last_results: + session._last_results.acquired_results = self._results.acquired_results + session._last_results.user_func_results = self._results.user_func_results + session._last_results.execution_errors = self._results.execution_errors def execute_compiled(self, job: ExecutionPayload): self._recipe_data = pre_process_compiled( @@ -500,8 +505,7 @@ def execute_compiled(self, job: ExecutionPayload): self._devices, job.scheduled_experiment.execution, ) - self._results = None - + self._session = None self._execute_compiled_impl() def _execute_compiled_impl(self): @@ -534,7 +538,7 @@ def _execute_compiled_impl(self): def _find_awg(self, seqc_name: str) -> tuple[str, int]: # TODO(2K): Do this in the recipe preprocessor, or even modify the compiled experiment # data model - for rt_exec_step in self._recipe_data.recipe.experiment.realtime_execution_init: + for rt_exec_step in self._recipe_data.recipe.realtime_execution_init: if rt_exec_step.seqc_ref == seqc_name: return rt_exec_step.device_id, rt_exec_step.awg_id return None, None @@ -644,7 +648,7 @@ def _prepare_rt_execution(self, rt_section_uid: str) -> list[DaqNodeAction]: device.configure_acquisition( awg_key, awg_config, - 
self._recipe_data.recipe.experiment.integrator_allocations, + self._recipe_data.recipe.integrator_allocations, effective_averages, effective_averaging_mode, rt_execution_info.acquisition_type, @@ -653,8 +657,7 @@ def _prepare_rt_execution(self, rt_section_uid: str) -> list[DaqNodeAction]: return nodes_to_prepare_rt def _prepare_result_shapes(self): - if self._results is None: - self._results = make_empty_results() + self._results = ExperimentResults() if len(self._recipe_data.rt_execution_infos) == 0: return if len(self._recipe_data.rt_execution_infos) > 1: @@ -738,7 +741,7 @@ def _read_one_step_results(self, nt_step: NtStepKey, rt_section_uid: str): for signal in awg_config.acquire_signals: integrator_allocation = next( i - for i in self._recipe_data.recipe.experiment.integrator_allocations + for i in self._recipe_data.recipe.integrator_allocations if i.signal_id == signal ) assert integrator_allocation.device_id == awg_key.device_uid diff --git a/laboneq/controller/devices/device_hdawg.py b/laboneq/controller/devices/device_hdawg.py index 07292ea..c199c0d 100644 --- a/laboneq/controller/devices/device_hdawg.py +++ b/laboneq/controller/devices/device_hdawg.py @@ -27,11 +27,10 @@ Prepare, Response, ) -from laboneq.controller.recipe_1_4_0 import Initialization -from laboneq.controller.recipe_enums import SignalType, TriggeringMode from laboneq.controller.recipe_processor import DeviceRecipeData, RecipeData from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType +from laboneq.data.recipe import Initialization, SignalType, TriggeringMode _logger = logging.getLogger(__name__) @@ -204,7 +203,7 @@ def rf_offset_control_nodes(self) -> list[NodeControlBase]: ) return nodes - def collect_awg_after_upload_nodes(self, initialization: Initialization.Data): + def collect_awg_after_upload_nodes(self, initialization: Initialization): nodes_to_configure_phase = [] for awg in initialization.awgs or []: @@ 
-244,7 +243,7 @@ def conditions_for_execution_done( return conditions def collect_initialization_nodes( - self, device_recipe_data: DeviceRecipeData, initialization: Initialization.Data + self, device_recipe_data: DeviceRecipeData, initialization: Initialization ) -> list[DaqNodeAction]: _logger.debug("%s: Initializing device...", self.dev_repr) @@ -456,7 +455,7 @@ def collect_prepare_nt_step_nodes( return nodes_to_set def collect_awg_before_upload_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ): device_specific_initialization_nodes = [ DaqNodeSetAction( @@ -477,7 +476,7 @@ def command_table_path(self, awg_index: int) -> str: return f"/{self.serial}/awgs/{awg_index}/commandtable/" def collect_trigger_configuration_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ) -> list[DaqNodeAction]: _logger.debug("%s: Configuring trigger configuration nodes.", self.dev_repr) nodes_to_configure_triggers = [] diff --git a/laboneq/controller/devices/device_pqsc.py b/laboneq/controller/devices/device_pqsc.py index c5c57a5..03db30c 100644 --- a/laboneq/controller/devices/device_pqsc.py +++ b/laboneq/controller/devices/device_pqsc.py @@ -19,9 +19,9 @@ NodeControlBase, Response, ) -from laboneq.controller.recipe_1_4_0 import Initialization from laboneq.controller.recipe_processor import DeviceRecipeData, RecipeData from laboneq.core.types.enums.acquisition_type import AcquisitionType +from laboneq.data.recipe import Initialization _logger = logging.getLogger(__name__) @@ -65,14 +65,14 @@ def clock_source_control_nodes(self) -> list[NodeControlBase]: ] def collect_initialization_nodes( - self, device_recipe_data: DeviceRecipeData, initialization: Initialization.Data + self, device_recipe_data: DeviceRecipeData, initialization: Initialization ) -> list[DaqNodeAction]: return [] def 
configure_feedback(self, recipe_data: RecipeData) -> list[DaqNodeAction]: # TODO(2K): Code duplication with Controller._wait_execution_to_stop # Make this mandatory in the recipe instead. - min_wait_time = recipe_data.recipe.experiment.max_step_execution_time + min_wait_time = recipe_data.recipe.max_step_execution_time if min_wait_time is None: min_wait_time = 10.0 # This is required because PQSC is only receiving the feedback events @@ -138,7 +138,7 @@ def conditions_for_execution_done( return {f"/{self.serial}/execution/enable": 0} def collect_trigger_configuration_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ) -> list[DaqNodeAction]: # TODO(2K): This was moved as is from no more existing "configure_as_leader". # Verify, if separate `batch_set` per node is truly necessary here, or the corresponding diff --git a/laboneq/controller/devices/device_shfppc.py b/laboneq/controller/devices/device_shfppc.py index 2f94d16..1c1d758 100644 --- a/laboneq/controller/devices/device_shfppc.py +++ b/laboneq/controller/devices/device_shfppc.py @@ -12,8 +12,8 @@ ) from laboneq.controller.communication import DaqNodeAction, DaqNodeSetAction from laboneq.controller.devices.device_zi import DeviceZI -from laboneq.controller.recipe_1_4_0 import Initialization from laboneq.controller.recipe_processor import DeviceRecipeData, RecipeData +from laboneq.data.recipe import Initialization class DeviceSHFPPC(DeviceZI): @@ -49,7 +49,7 @@ def update_clock_source(self, force_internal: bool | None): def pre_process_attributes( self, - initialization: Initialization.Data, + initialization: Initialization, ) -> Iterator[DeviceAttribute]: yield from super().pre_process_attributes(initialization) ppchannels = initialization.ppchannels or [] @@ -68,7 +68,7 @@ def collect_reset_nodes(self) -> list[DaqNodeAction]: return [] def collect_initialization_nodes( - self, device_recipe_data: DeviceRecipeData, 
initialization: Initialization.Data + self, device_recipe_data: DeviceRecipeData, initialization: Initialization ) -> list[DaqNodeAction]: nodes_to_set: list[DaqNodeAction] = [] ppchannels = initialization.ppchannels or [] diff --git a/laboneq/controller/devices/device_shfqa.py b/laboneq/controller/devices/device_shfqa.py index a4f8cd9..268a384 100644 --- a/laboneq/controller/devices/device_shfqa.py +++ b/laboneq/controller/devices/device_shfqa.py @@ -27,13 +27,6 @@ Waveforms, delay_to_rounded_samples, ) -from laboneq.controller.recipe_1_4_0 import ( - IO, - Initialization, - IntegratorAllocation, - Measurement, -) -from laboneq.controller.recipe_enums import TriggeringMode from laboneq.controller.recipe_processor import ( AwgConfig, AwgKey, @@ -45,6 +38,13 @@ from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType, is_spectroscopy from laboneq.core.types.enums.averaging_mode import AveragingMode +from laboneq.data.recipe import ( + IO, + Initialization, + IntegratorAllocation, + Measurement, + TriggeringMode, +) _logger = logging.getLogger(__name__) @@ -105,7 +105,7 @@ def get_sequencer_paths(self, index: int) -> SequencerPaths: def _get_num_awgs(self): return self._channels - def _validate_range(self, io: IO.Data, is_out: bool): + def _validate_range(self, io: IO, is_out: bool): if io.range is None: return input_ranges = np.array( @@ -192,7 +192,7 @@ def configure_acquisition( self, awg_key: AwgKey, awg_config: AwgConfig, - integrator_allocations: list[IntegratorAllocation.Data], + integrator_allocations: list[IntegratorAllocation], averages: int, averaging_mode: AveragingMode, acquisition_type: AcquisitionType, @@ -229,7 +229,7 @@ def _configure_readout( acquisition_type: AcquisitionType, awg_key: AwgKey, awg_config: AwgConfig, - integrator_allocations: list[IntegratorAllocation.Data], + integrator_allocations: list[IntegratorAllocation], averages: int, average_mode: int, ): @@ -484,7 
+484,7 @@ def conditions_for_execution_done( def pre_process_attributes( self, - initialization: Initialization.Data, + initialization: Initialization, ) -> Iterator[DeviceAttribute]: yield from super().pre_process_attributes(initialization) @@ -517,7 +517,7 @@ def pre_process_attributes( ) def collect_initialization_nodes( - self, device_recipe_data: DeviceRecipeData, initialization: Initialization.Data + self, device_recipe_data: DeviceRecipeData, initialization: Initialization ) -> list[DaqNodeSetAction]: _logger.debug("%s: Initializing device...", self.dev_repr) @@ -759,9 +759,9 @@ def prepare_upload_all_binary_waves( def _configure_readout_mode_nodes( self, - dev_input: IO.Data, - dev_output: IO.Data, - measurement: Measurement.Data | None, + dev_input: IO, + dev_output: IO, + measurement: Measurement | None, device_uid: str, recipe_data: RecipeData, ): @@ -776,9 +776,7 @@ def _configure_readout_mode_nodes( ] max_len = 4096 - for ( - integrator_allocation - ) in recipe_data.recipe.experiment.integrator_allocations: + for integrator_allocation in recipe_data.recipe.integrator_allocations: if ( integrator_allocation.device_id != device_uid or integrator_allocation.awg != measurement.channel @@ -825,7 +823,7 @@ def _configure_readout_mode_nodes( return nodes_to_set_for_readout_mode def _configure_spectroscopy_mode_nodes( - self, dev_input: IO.Data, measurement: Measurement.Data | None + self, dev_input: IO, measurement: Measurement | None ): _logger.debug("%s: Setting measurement mode to 'Spectroscopy'.", self.dev_repr) @@ -845,7 +843,7 @@ def _configure_spectroscopy_mode_nodes( return nodes_to_set_for_spectroscopy_mode def collect_awg_before_upload_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ): nodes_to_initialize_measurement = [] @@ -894,7 +892,7 @@ def collect_awg_before_upload_nodes( ) return nodes_to_initialize_measurement - def 
collect_awg_after_upload_nodes(self, initialization: Initialization.Data): + def collect_awg_after_upload_nodes(self, initialization: Initialization): nodes_to_initialize_measurement = [] inputs = initialization.inputs or [] for dev_input in inputs: @@ -936,7 +934,7 @@ def collect_awg_after_upload_nodes(self, initialization: Initialization.Data): return nodes_to_initialize_measurement def collect_trigger_configuration_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ) -> list[DaqNodeAction]: _logger.debug("Configuring triggers...") self._wait_for_awgs = True diff --git a/laboneq/controller/devices/device_shfsg.py b/laboneq/controller/devices/device_shfsg.py index 87f05f2..7cdf6c9 100644 --- a/laboneq/controller/devices/device_shfsg.py +++ b/laboneq/controller/devices/device_shfsg.py @@ -22,11 +22,10 @@ from laboneq.controller.devices.device_shf_base import DeviceSHFBase from laboneq.controller.devices.device_zi import SequencerPaths from laboneq.controller.devices.zi_node_monitor import NodeControlBase -from laboneq.controller.recipe_1_4_0 import IO, Initialization -from laboneq.controller.recipe_enums import TriggeringMode from laboneq.controller.recipe_processor import DeviceRecipeData, RecipeData from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType +from laboneq.data.recipe import IO, Initialization, TriggeringMode _logger = logging.getLogger(__name__) @@ -101,7 +100,7 @@ def get_sequencer_paths(self, index: int) -> SequencerPaths: def _get_num_awgs(self): return self._channels - def _validate_range(self, io: IO.Data): + def _validate_range(self, io: IO): if io.range is None: return range_list = numpy.array( @@ -206,7 +205,7 @@ def conditions_for_execution_done( conditions[f"/{self.serial}/sgchannels/{awg_index}/awg/enable"] = 0 return conditions - def _validate_initialization(self, 
initialization: Initialization.Data): + def _validate_initialization(self, initialization: Initialization): super()._validate_initialization(initialization) outputs = initialization.outputs or [] for output in outputs: @@ -221,13 +220,13 @@ def _validate_initialization(self, initialization: Initialization.Data): def pre_process_attributes( self, - initialization: Initialization.Data, + initialization: Initialization, ) -> Iterator[DeviceAttribute]: yield from super().pre_process_attributes(initialization) - center_frequencies: dict[int, IO.Data] = {} + center_frequencies: dict[int, IO] = {} - def get_synth_idx(io: IO.Data): + def get_synth_idx(io: IO): if io.channel >= self._channels: raise LabOneQControllerException( f"{self.dev_repr}: Attempt to configure channel {io.channel + 1} on a device " @@ -266,7 +265,7 @@ def get_synth_idx(io: IO.Data): ) def collect_initialization_nodes( - self, device_recipe_data: DeviceRecipeData, initialization: Initialization.Data + self, device_recipe_data: DeviceRecipeData, initialization: Initialization ) -> list[DaqNodeSetAction]: _logger.debug("%s: Initializing device...", self.dev_repr) @@ -434,7 +433,7 @@ def prepare_upload_binary_wave( ) def collect_trigger_configuration_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ) -> list[DaqNodeAction]: _logger.debug("Configuring triggers...") self._wait_for_awgs = True diff --git a/laboneq/controller/devices/device_uhfqa.py b/laboneq/controller/devices/device_uhfqa.py index 90d028b..b2675c8 100644 --- a/laboneq/controller/devices/device_uhfqa.py +++ b/laboneq/controller/devices/device_uhfqa.py @@ -20,8 +20,6 @@ ) from laboneq.controller.devices.device_zi import DeviceZI, delay_to_rounded_samples from laboneq.controller.devices.zi_node_monitor import Command, NodeControlBase -from laboneq.controller.recipe_1_4_0 import IO, Initialization, IntegratorAllocation -from 
laboneq.controller.recipe_enums import TriggeringMode from laboneq.controller.recipe_processor import ( AwgConfig, AwgKey, @@ -33,6 +31,7 @@ from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType from laboneq.core.types.enums.averaging_mode import AveragingMode +from laboneq.data.recipe import IO, Initialization, IntegratorAllocation, TriggeringMode _logger = logging.getLogger(__name__) @@ -102,7 +101,7 @@ def update_clock_source(self, force_internal: bool | None): ) if len(self._uplinks) > 1: self._error_ambiguous_upstream() - upstream = self._uplinks[0]() + upstream = next(iter(self._uplinks))() if upstream is None: self._error_ambiguous_upstream() is_desktop = upstream.is_leader() and ( @@ -143,7 +142,7 @@ def configure_acquisition( self, awg_key: AwgKey, awg_config: AwgConfig, - integrator_allocations: list[IntegratorAllocation.Data], + integrator_allocations: list[IntegratorAllocation], averages: int, averaging_mode: AveragingMode, acquisition_type: AcquisitionType, @@ -169,7 +168,7 @@ def _configure_result_logger( self, awg_key: AwgKey, awg_config: AwgConfig, - integrator_allocations: list[IntegratorAllocation.Data], + integrator_allocations: list[IntegratorAllocation], averages: int, averaging_mode: AveragingMode, acquisition_type: AcquisitionType, @@ -271,7 +270,7 @@ def conditions_for_execution_done( conditions[f"/{self.serial}/awgs/{awg_index}/enable"] = 0 return conditions - def _validate_range(self, io: IO.Data, is_out: bool): + def _validate_range(self, io: IO, is_out: bool): if io.range is None: return @@ -299,7 +298,7 @@ def _validate_range(self, io: IO.Data, is_out: bool): range_list, ) - def _validate_initialization(self, initialization: Initialization.Data): + def _validate_initialization(self, initialization: Initialization): super()._validate_initialization(initialization) outputs = initialization.outputs or [] for output in outputs: @@ -313,7 +312,7 @@ def 
_validate_initialization(self, initialization: Initialization.Data): ) def collect_initialization_nodes( - self, device_recipe_data: DeviceRecipeData, initialization: Initialization.Data + self, device_recipe_data: DeviceRecipeData, initialization: Initialization ) -> list[DaqNodeAction]: _logger.debug("%s: Initializing device...", self.dev_repr) @@ -436,9 +435,7 @@ def _configure_standard_mode_nodes( nodes_to_set_for_standard_mode.append( DaqNodeSetAction(self._daq, f"/{self.serial}/qas/0/integration/mode", 0) ) - for ( - integrator_allocation - ) in recipe_data.recipe.experiment.integrator_allocations: + for integrator_allocation in recipe_data.recipe.integrator_allocations: if integrator_allocation.device_id != device_uid: continue @@ -564,7 +561,7 @@ def _configure_spectroscopy_mode_nodes(self): return nodes_to_set_for_spectroscopy_mode def collect_awg_before_upload_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ): acquisition_type = RtExecutionInfo.get_acquisition_type( recipe_data.rt_execution_infos @@ -576,7 +573,7 @@ def collect_awg_before_upload_nodes( acquisition_type, initialization.device_uid, recipe_data ) - def collect_awg_after_upload_nodes(self, initialization: Initialization.Data): + def collect_awg_after_upload_nodes(self, initialization: Initialization): nodes_to_initialize_measurement = [] inputs = initialization.inputs if len(initialization.measurements) > 0: @@ -616,7 +613,7 @@ def collect_awg_after_upload_nodes(self, initialization: Initialization.Data): return nodes_to_initialize_measurement def collect_trigger_configuration_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ) -> list[DaqNodeAction]: _logger.debug("Configuring triggers...") _logger.debug("Configuring strobe index: 16.") diff --git a/laboneq/controller/devices/device_zi.py 
b/laboneq/controller/devices/device_zi.py index 21ad3bf..97770ee 100644 --- a/laboneq/controller/devices/device_zi.py +++ b/laboneq/controller/devices/device_zi.py @@ -35,11 +35,6 @@ DaqWrapper, ) from laboneq.controller.devices.zi_node_monitor import NodeControlBase -from laboneq.controller.recipe_1_4_0 import ( - Initialization, - IntegratorAllocation, - OscillatorParam, -) from laboneq.controller.recipe_processor import ( AwgConfig, AwgKey, @@ -49,6 +44,7 @@ from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType from laboneq.core.types.enums.averaging_mode import AveragingMode +from laboneq.data.recipe import Initialization, IntegratorAllocation, OscillatorParam from laboneq.data.scheduled_experiment import ScheduledExperiment _logger = logging.getLogger(__name__) @@ -256,7 +252,9 @@ def add_downlink(self, port: str, linked_device_uid: str, linked_device: DeviceZ self._downlinks[port] = (linked_device_uid, ref(linked_device)) def add_uplink(self, linked_device: DeviceZI): - self._uplinks.append(ref(linked_device)) + dev_ref = ref(linked_device) + if dev_ref not in self._uplinks: + self._uplinks.append(dev_ref) def remove_all_links(self): self._downlinks.clear() @@ -273,12 +271,12 @@ def is_follower(self): def is_standalone(self): return len(self._uplinks) == 0 and len(self._downlinks) == 0 - def _validate_initialization(self, initialization: Initialization.Data): + def _validate_initialization(self, initialization: Initialization): pass def pre_process_attributes( self, - initialization: Initialization.Data, + initialization: Initialization, ) -> Iterator[DeviceAttribute]: self._validate_initialization(initialization) outputs = initialization.outputs or [] @@ -308,12 +306,12 @@ def pre_process_attributes( ) def collect_initialization_nodes( - self, device_recipe_data: DeviceRecipeData, initialization: Initialization.Data + self, device_recipe_data: DeviceRecipeData, initialization: 
Initialization ) -> list[DaqNodeAction]: return [] def collect_trigger_configuration_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ) -> list[DaqNodeAction]: return [] @@ -430,7 +428,7 @@ def _get_next_osc_index( def _make_osc_path(self, channel: int, index: int) -> str: return f"/{self.serial}/oscs/{index}/freq" - def allocate_osc(self, osc_param: OscillatorParam.Data): + def allocate_osc(self, osc_param: OscillatorParam): osc_group = self._osc_group_by_channel(osc_param.channel) osc_group_oscs = [o for o in self._allocated_oscs if o.group == osc_group] same_id_osc = next((o for o in osc_group_oscs if o.id == osc_param.id), None) @@ -467,7 +465,7 @@ def configure_acquisition( self, awg_key: AwgKey, awg_config: AwgConfig, - integrator_allocations: list[IntegratorAllocation.Data], + integrator_allocations: list[IntegratorAllocation], averages: int, averaging_mode: AveragingMode, acquisition_type: AcquisitionType, @@ -900,11 +898,11 @@ def collect_osc_initialization_nodes(self) -> list[DaqNodeAction]: return nodes_to_initialize_oscs def collect_awg_before_upload_nodes( - self, initialization: Initialization.Data, recipe_data: RecipeData + self, initialization: Initialization, recipe_data: RecipeData ): return [] - def collect_awg_after_upload_nodes(self, initialization: Initialization.Data): + def collect_awg_after_upload_nodes(self, initialization: Initialization): return [] def collect_execution_nodes(self): diff --git a/laboneq/controller/devices/zi_emulator.py b/laboneq/controller/devices/zi_emulator.py index c31dc56..38baa7f 100644 --- a/laboneq/controller/devices/zi_emulator.py +++ b/laboneq/controller/devices/zi_emulator.py @@ -662,6 +662,7 @@ def __init__(self, host: str, port: int, api_level: int): if api_level is None: api_level = 6 assert api_level == 6 + assert isinstance(port, int) super().__init__() self._scheduler = sched.scheduler() self._device_type_map: 
dict[str, str] = {} diff --git a/laboneq/controller/near_time_runner.py b/laboneq/controller/near_time_runner.py index 1639a03..7eab3a0 100644 --- a/laboneq/controller/near_time_runner.py +++ b/laboneq/controller/near_time_runner.py @@ -17,10 +17,10 @@ batch_set, ) from laboneq.controller.protected_session import ProtectedSession -from laboneq.controller.recipe_enums import NtStepKey from laboneq.controller.util import LabOneQControllerException, SweepParamsTracker from laboneq.core.types.enums.acquisition_type import AcquisitionType from laboneq.core.types.enums.averaging_mode import AveragingMode +from laboneq.data.recipe import NtStepKey from laboneq.executor.executor import ExecutorBase, LoopFlags, LoopingMode if TYPE_CHECKING: @@ -54,7 +54,9 @@ def user_func_handler(self, func_name: str, args: dict[str, Any]): raise LabOneQControllerException( f"User function '{func_name}' is not registered." ) - res = func(ProtectedSession(self.controller._session), **args) + res = func( + ProtectedSession(self.controller._session, self.controller._results), **args + ) user_func_results = self.controller._results.user_func_results.setdefault( func_name, [] ) diff --git a/laboneq/controller/protected_session.py b/laboneq/controller/protected_session.py index 74347bb..b944963 100644 --- a/laboneq/controller/protected_session.py +++ b/laboneq/controller/protected_session.py @@ -1,10 +1,32 @@ # Copyright 2022 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 +from typing import Any + from laboneq.controller.util import LabOneQControllerException, SimpleProxy +from laboneq.data.experiment_results import ExperimentResults +from laboneq.dsl.result.results import Results class ProtectedSession(SimpleProxy): + def __init__(self, wrapped_session: Any, experiment_results: ExperimentResults): + super().__init__(wrapped_session) + self._experiment_results = experiment_results + + # Backwards compatibility after migration to the new architecture + @property + def results(self) -> 
Results: + return self._last_results + + # Backwards compatibility after migration to the new architecture + @property + def _last_results(self) -> Results: + return Results( + acquired_results=self._experiment_results.acquired_results, + user_func_results=self._experiment_results.user_func_results, + execution_errors=self._experiment_results.execution_errors, + ) + def disconnect(self): raise LabOneQControllerException( "'disconnect' is not allowed from the user function." diff --git a/laboneq/controller/recipe_enums.py b/laboneq/controller/recipe_enums.py deleted file mode 100644 index aaab1d9..0000000 --- a/laboneq/controller/recipe_enums.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2019 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - -from dataclasses import dataclass -from enum import Enum - - -class SignalType(Enum): - IQ = "iq" - SINGLE = "single" - INTEGRATION = "integration" - MARKER = "marker" - - -class RefClkType(Enum): - _10MHZ = 10 - _100MHZ = 100 - - -class TriggeringMode(Enum): - ZSYNC_FOLLOWER = 1 - DIO_FOLLOWER = 2 - DESKTOP_LEADER = 3 - DESKTOP_DIO_FOLLOWER = 4 - INTERNAL_FOLLOWER = 5 - - -@dataclass(frozen=True) -class NtStepKey: - indices: tuple[int] - - -class AcquisitionType(Enum): - INTEGRATION_TRIGGER = "integration_trigger" - SPECTROSCOPY_IQ = "spectroscopy" - SPECTROSCOPY_PSD = "spectroscopy_psd" - SPECTROSCOPY = SPECTROSCOPY_IQ - DISCRIMINATION = "discrimination" - RAW = "raw" diff --git a/laboneq/controller/recipe_processor.py b/laboneq/controller/recipe_processor.py index f3ad477..1ada7ac 100644 --- a/laboneq/controller/recipe_processor.py +++ b/laboneq/controller/recipe_processor.py @@ -19,6 +19,7 @@ from laboneq.controller.util import LabOneQControllerException from laboneq.core.types.enums.acquisition_type import AcquisitionType from laboneq.core.types.enums.averaging_mode import AveragingMode +from laboneq.data.recipe import IO, Initialization, Recipe, SignalType from laboneq.data.scheduled_experiment import 
ScheduledExperiment from laboneq.executor.executor import ( ExecutorBase, @@ -28,11 +29,6 @@ Statement, ) -from .recipe_1_4_0 import IO -from .recipe_1_4_0 import Experiment as RecipeExperiment -from .recipe_1_4_0 import Initialization, Recipe -from .recipe_enums import SignalType - if TYPE_CHECKING: from laboneq.controller.devices.device_collection import DeviceCollection @@ -127,7 +123,7 @@ def signal_by_handle(self, handle: str) -> Optional[str]: @dataclass class RecipeData: scheduled_experiment: ScheduledExperiment - recipe: Recipe.Data + recipe: Recipe execution: Sequence result_shapes: HandleResultShapes rt_execution_infos: RtExecutionInfos @@ -137,12 +133,11 @@ class RecipeData: oscillator_ids: list[str] @property - def initializations(self) -> Iterator[Initialization.Data]: - for initialization in self.recipe.experiment.initializations: + def initializations(self) -> Iterator[Initialization]: + for initialization in self.recipe.initializations: yield initialization - def get_initialization_by_device_uid(self, device_uid: str) -> Initialization.Data: - initialization: Initialization.Data + def get_initialization_by_device_uid(self, device_uid: str) -> Initialization: for initialization in self.initializations: if initialization.device_uid == device_uid: return initialization @@ -163,7 +158,7 @@ def awg_config_by_acquire_signal(self, signal_id: str) -> Optional[AwgConfig]: ) -def _pre_process_iq_settings_hdawg(initialization: Initialization.Data): +def _pre_process_iq_settings_hdawg(initialization: Initialization): # TODO(2K): Every pair of outputs with adjacent even+odd channel numbers (starting from 0) # is treated as an I/Q pair. I/Q pairs should be specified explicitly instead. @@ -193,13 +188,9 @@ def _pre_process_iq_settings_hdawg(initialization: Initialization.Data): # Determine I and Q output elements for the IQ pair with index awg_idxs. 
if output.channel % 2 == 0: i_out = output - q_out = next( - (o for o in outputs if o.channel == output.channel + 1), IO.Data(0) - ) + q_out = next((o for o in outputs if o.channel == output.channel + 1), IO(0)) else: - i_out = next( - (o for o in outputs if o.channel == output.channel - 1), IO.Data(0) - ) + i_out = next((o for o in outputs if o.channel == output.channel - 1), IO(0)) q_out = output if i_out.gains is None or q_out.gains is None: @@ -370,7 +361,7 @@ def _calculate_result_shapes( def _calculate_awg_configs( rt_execution_infos: RtExecutionInfos, - experiment: RecipeExperiment.Data, + recipe: Recipe, ) -> AwgConfigs: awg_configs: AwgConfigs = defaultdict(AwgConfig) @@ -383,17 +374,17 @@ def awg_key_by_acquire_signal(signal_id: str) -> AwgKey: def integrator_index_by_acquire_signal(signal_id: str, is_local: bool) -> int: integrator = next( - ia for ia in experiment.integrator_allocations if ia.signal_id == signal_id + ia for ia in recipe.integrator_allocations if ia.signal_id == signal_id ) # Only relevant for discrimination mode, where only one channel should # be assigned (no multi-state as of now) # TODO(2K): Check if HBAR-1359 affects also SHFQA / global feedback return integrator.channels[0] * (2 if is_local else 1) - for a in experiment.integrator_allocations: + for a in recipe.integrator_allocations: awg_configs[AwgKey(a.device_id, a.awg)].acquire_signals.add(a.signal_id) - for initialization in experiment.initializations: + for initialization in recipe.initializations: device_id = initialization.device_uid for awg in initialization.awgs or []: awg_config = awg_configs[AwgKey(device_id, awg.awg)] @@ -425,7 +416,7 @@ def integrator_index_by_acquire_signal(signal_id: str, is_local: bool) -> int: for signal, sections in rt_execution_info.acquire_sections.items(): awg_key = awg_key_by_acquire_signal(signal) for section in sections: - for acquire_length_info in experiment.acquire_lengths: + for acquire_length_info in recipe.acquire_lengths: if ( 
acquire_length_info.signal_id == signal and acquire_length_info.section_id == section @@ -453,7 +444,7 @@ def integrator_index_by_acquire_signal(signal_id: str, is_local: bool) -> int: # regardless of the given integrators mask. Masked-out integrators just leave the # value at NaN (corresponds to None in the map). awg_result_map: Dict[str, List[str]] = defaultdict(list) - for acquires in experiment.simultaneous_acquires: + for acquires in recipe.simultaneous_acquires: if any(signal in acquires for signal in awg_config.acquire_signals): for signal in awg_config.acquire_signals: awg_result_map[signal].append(acquires.get(signal)) @@ -474,13 +465,13 @@ def integrator_index_by_acquire_signal(signal_id: str, is_local: bool) -> int: def _pre_process_attributes( - experiment: RecipeExperiment.Data, devices: DeviceCollection + recipe: Recipe, devices: DeviceCollection ) -> tuple[AttributeValueTracker, list[str]]: attribute_value_tracker = AttributeValueTracker() oscillator_ids: list[str] = [] oscillators_check: dict[str, str | float] = {} - for oscillator_param in experiment.oscillator_params: + for oscillator_param in recipe.oscillator_params: value_or_param = oscillator_param.param or oscillator_param.frequency if oscillator_param.id in oscillator_ids: osc_index = oscillator_ids.index(oscillator_param.id) @@ -502,7 +493,7 @@ def _pre_process_attributes( ), ) - for initialization in experiment.initializations: + for initialization in recipe.initializations: device = devices.find_by_uid(initialization.device_uid) for attribute in device.pre_process_attributes(initialization): attribute_value_tracker.add_attribute( @@ -518,19 +509,17 @@ def pre_process_compiled( devices: DeviceCollection, execution: Statement = None, ) -> RecipeData: - recipe: Recipe.Data = Recipe().load(scheduled_experiment.recipe) + recipe = scheduled_experiment.recipe device_settings: DeviceSettings = defaultdict(DeviceRecipeData) - for initialization in recipe.experiment.initializations: + for 
initialization in recipe.initializations: device_settings[initialization.device_uid] = DeviceRecipeData( iq_settings=_pre_process_iq_settings_hdawg(initialization) ) result_shapes, rt_execution_infos = _calculate_result_shapes(execution) - awg_configs = _calculate_awg_configs(rt_execution_infos, recipe.experiment) - attribute_value_tracker, oscillator_ids = _pre_process_attributes( - recipe.experiment, devices - ) + awg_configs = _calculate_awg_configs(rt_execution_infos, recipe) + attribute_value_tracker, oscillator_ids = _pre_process_attributes(recipe, devices) recipe_data = RecipeData( scheduled_experiment=scheduled_experiment, diff --git a/laboneq/controller/results.py b/laboneq/controller/results.py index 11bbc47..58c3330 100644 --- a/laboneq/controller/results.py +++ b/laboneq/controller/results.py @@ -3,22 +3,13 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any +from typing import Any import numpy as np from numpy.typing import ArrayLike -from laboneq.controller.recipe_enums import NtStepKey - -if TYPE_CHECKING: - from laboneq.dsl.result.acquired_result import AcquiredResult - from laboneq.dsl.result.results import Results - - -def make_empty_results() -> Results: - from laboneq.dsl.result.results import Results - - return Results(acquired_results={}, user_func_results={}, execution_errors=[]) +from laboneq.data.experiment_results import AcquiredResult +from laboneq.data.recipe import NtStepKey def make_acquired_result( @@ -26,8 +17,6 @@ def make_acquired_result( axis_name: list[str | list[str]], axis: list[ArrayLike | list[ArrayLike]], ) -> AcquiredResult: - from laboneq.dsl.result.acquired_result import AcquiredResult - return AcquiredResult(data, axis_name, axis) diff --git a/laboneq/core/types/enums/acquisition_type.py b/laboneq/core/types/enums/acquisition_type.py index 1c55a3d..ad89188 100644 --- a/laboneq/core/types/enums/acquisition_type.py +++ b/laboneq/core/types/enums/acquisition_type.py @@ -1,10 +1,15 @@ # Copyright 
2022 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + from enum import Enum from typing import Any, Union +# TODO: Move to laboneq.data. Note that moving the type will cause issues when deserialising +# objects that referred to the class in its old module. Moving the class is therefore +# not as straight-forward as one might naively hope. class AcquisitionType(Enum): """Acquisition type diff --git a/laboneq/data/__init__.py b/laboneq/data/__init__.py index 17c557a..0a25134 100644 --- a/laboneq/data/__init__.py +++ b/laboneq/data/__init__.py @@ -1,2 +1,9 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 + + +class EnumReprMixin: + name: ... + + def __repr__(self): + return f"{self.__class__.__name__}.{self.name}" diff --git a/laboneq/data/calibration/__init__.py b/laboneq/data/calibration/__init__.py index 347fcf5..ee3d219 100644 --- a/laboneq/data/calibration/__init__.py +++ b/laboneq/data/calibration/__init__.py @@ -2,39 +2,31 @@ # SPDX-License-Identifier: Apache-2.0 -# __init__.py of 'calibration' package - autogenerated, do not edit from __future__ import annotations from dataclasses import dataclass, field from enum import Enum, auto -from typing import Any, Dict, List, Optional from numpy.typing import ArrayLike +from laboneq.data import EnumReprMixin +from laboneq.data.parameter import Parameter -# -# Enums -# -class CarrierType(Enum): + +class CarrierType(EnumReprMixin, Enum): IF = auto() RF = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class ModulationType(Enum): +class ModulationType(EnumReprMixin, Enum): AUTO = auto() HARDWARE = auto() SOFTWARE = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -# -# Data Classes -# +class PortMode(EnumReprMixin, Enum): + LF = auto() + RF = auto() @dataclass @@ -45,14 +37,14 @@ class BounceCompensation: @dataclass class Calibration: - calibration_items: Dict = 
field(default_factory=dict) + calibration_items: dict[str, SignalCalibration] = field(default_factory=dict) @dataclass class MixerCalibration: uid: str = None - voltage_offsets: Optional[List[float]] = None - correction_matrix: Optional[List[List[float]]] = None + voltage_offsets: list[float] | None = None + correction_matrix: list[list[float]] | None = None @dataclass @@ -79,7 +71,7 @@ class HighPassCompensation: @dataclass class Oscillator: uid: str = None - frequency: Optional[Any] = None + frequency: float | Parameter = None modulation_type: ModulationType = None carrier_type: CarrierType = None @@ -87,7 +79,35 @@ class Oscillator: @dataclass class Precompensation: uid: str = None - exponential: Optional[List[ExponentialCompensation]] = None - high_pass: Optional[HighPassCompensation] = None - bounce: Optional[BounceCompensation] = None - FIR: Optional[FIRCompensation] = None + exponential: list[ExponentialCompensation] | None = None + high_pass: HighPassCompensation | None = None + bounce: BounceCompensation | None = None + FIR: FIRCompensation | None = None + + +@dataclass +class AmplifierPump: + uid: str = None + pump_freq: float | Parameter | None = None + pump_power: float | Parameter | None = None + cancellation: bool = True + alc_engaged: bool = True + use_probe: bool = False + probe_frequency: float | Parameter | None = None + probe_power: float | Parameter | None = None + + +@dataclass +class SignalCalibration: + oscillator: Oscillator | None = None + local_oscillator_frequency: float | Parameter | None = None + mixer_calibration: MixerCalibration | None = None + precompensation: Precompensation | None = None + port_delay: float | Parameter | None = None + port_mode: PortMode | None = None + delay_signal: float | None = None + voltage_offset: float | None = None + range: int | float | None = None + threshold: float | None = None + amplitude: float | Parameter | None = None + amplifier_pump: AmplifierPump | None = None diff --git 
a/laboneq/data/compilation_job/__init__.py b/laboneq/data/compilation_job/__init__.py index 1eaa6de..d1cba38 100644 --- a/laboneq/data/compilation_job/__init__.py +++ b/laboneq/data/compilation_job/__init__.py @@ -2,132 +2,217 @@ # SPDX-License-Identifier: Apache-2.0 -# __init__.py of 'compilation_job' package - autogenerated, do not edit from __future__ import annotations from dataclasses import dataclass, field from enum import Enum, auto -from typing import Any, Dict, List, Optional +from typing import Optional, Union + +from numpy.typing import ArrayLike + +from laboneq.core.types.enums.acquisition_type import AcquisitionType +from laboneq.data import EnumReprMixin +from laboneq.data.calibration import ( + BounceCompensation, + ExponentialCompensation, + FIRCompensation, + HighPassCompensation, + PortMode, +) +from laboneq.data.experiment_description import ( + AveragingMode, + ExecutionType, + RepetitionMode, + SectionAlignment, +) # # Enums # -class DeviceInfoType(Enum): - UHFQA = auto() - HDAWG = auto() - SHFQA = auto() - SHFSG = auto() - SHFQC = auto() - PQSC = auto() +class DeviceInfoType(EnumReprMixin, Enum): + UHFQA = "uhfqa" + HDAWG = "hdawg" + SHFQA = "shfqa" + SHFSG = "shfsg" + SHFQC = "shfqc" + PQSC = "pqsc" - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class ReferenceClockSourceInfo(Enum): +class ReferenceClockSourceInfo(EnumReprMixin, Enum): INTERNAL = auto() EXTERNAL = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - - -class SectionInfoAlignment(Enum): - LEFT = auto() - RIGHT = auto() - - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class SignalInfoType(Enum): +class SignalInfoType(EnumReprMixin, Enum): IQ = auto() RF = auto() INTEGRATION = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - # # Data Classes # +@dataclass +class ParameterInfo: + uid: str + start: float | None = None + step: float | None = None + values: ArrayLike 
| None = None + axis_name: str | None = None + + @dataclass class DeviceInfo: uid: str = None device_type: DeviceInfoType = None + reference_clock: float = None reference_clock_source: ReferenceClockSourceInfo = None + is_qc: bool | None = None @dataclass class OscillatorInfo: uid: str = None - frequency: float = None + frequency: float | ParameterInfo = None is_hardware: bool = None @dataclass class PulseDef: uid: str = None + function: str | None = None length: float = None amplitude: float = None phase: float = None + can_compress: bool = False increment_oscillator_phase: float = None set_oscillator_phase: float = None - precompensation_clear: bool = None - samples: List[float] = field(default_factory=list) - function: Optional[str] = None - pulse_parameters: Optional[Dict] = None + samples: ArrayLike = field(default_factory=list) + pulse_parameters: dict | None = None @dataclass class SectionInfo: uid: str = None length: float = None - alignment: SectionInfoAlignment = None + alignment: SectionAlignment | None = None + handle: str | None = None + state: int | None = None + local: bool | None = None count: int = None - chunk_count: int = None - execution_type: str = None - averaging_type: str = None - children: List[Any] = field(default_factory=list) + chunk_count: int = 1 + execution_type: ExecutionType | None = None + averaging_mode: AveragingMode | None = None + acquisition_type: AcquisitionType | None = None + repetition_mode: RepetitionMode | None = None + repetition_time: float | None = None + reset_oscillator_phase: bool = False + children: list[SectionInfo] = field(default_factory=list) + pulses: list[SectionSignalPulse] = field(default_factory=list) on_system_grid: bool = None - trigger: Dict = field(default_factory=dict) + trigger: list = field(default_factory=list) + parameters: list[ParameterInfo] = field(default_factory=list) + play_after: list[str] = field(default_factory=list) + + +@dataclass +class MixerCalibrationInfo: + voltage_offsets: 
tuple[float, float] = (0.0, 0.0) + correction_matrix: tuple[tuple[float, float], tuple[float, float]] = ( + (1.0, 0.0), + (0.0, 1.0), + ) + + +@dataclass +class PrecompensationInfo: + exponential: list[ExponentialCompensation] | None = None + high_pass: HighPassCompensation | None = None + bounce: BounceCompensation | None = None + FIR: FIRCompensation | None = None + + +@dataclass +class SignalRange: + value: float + unit: str | None + + +@dataclass +class AmplifierPumpInfo: + pump_freq: float | ParameterInfo | None = None + pump_power: float | ParameterInfo | None = None + cancellation: bool = True + alc_engaged: bool = True + use_probe: bool = False + probe_frequency: float | ParameterInfo | None = None + probe_power: float | ParameterInfo | None = None @dataclass class SignalInfo: uid: str = None device: DeviceInfo = None - oscillators: List[OscillatorInfo] = field(default_factory=list) - channels: List[int] = field(default_factory=list) + oscillator: OscillatorInfo | None = None + channels: list[int] = field(default_factory=list) type: SignalInfoType = None + voltage_offset: float | None = None + mixer_calibration: MixerCalibrationInfo | None = None + precompensation: PrecompensationInfo | None = None + lo_frequency: float | ParameterInfo | None = None + signal_range: SignalRange | None = None + port_delay: float | ParameterInfo | None = None + delay_signal: float | ParameterInfo | None = None + port_mode: PortMode | None = None + threshold: float | None = None + amplitude: float | ParameterInfo | None = None + amplifier_pump: AmplifierPumpInfo | None = None @dataclass class SectionSignalPulse: - uid: str = None section: SectionInfo = None signal: SignalInfo = None - pulse_def: Optional[PulseDef] = None - delay: Optional[float] = None - increment_oscillator_phase: Optional[float] = None - set_oscillator_phase: Optional[float] = None - precompensation_clear: Optional[bool] = None - pulse_parameters: Dict = field(default_factory=dict) - marker: Optional[Dict] = 
None + pulse_def: PulseDef | None = None + length: float | ParameterInfo | None = None + amplitude: float | ParameterInfo | None = None + phase: float | ParameterInfo | None = None + increment_oscillator_phase: float | ParameterInfo | None = None + set_oscillator_phase: float | ParameterInfo | None = None + precompensation_clear: bool | None = None + pulse_parameters: dict = field(default_factory=dict) + acquire_params: AcquireInfo = None + marker: list[Marker] | None = None + + +@dataclass +class AcquireInfo: + handle: str + acquisition_type: str + + +@dataclass +class Marker: + marker_selector: str + enable: bool + start: float + length: float + pulse_id: str @dataclass class ExperimentInfo: uid: str = None - signals: List[SignalInfo] = field(default_factory=list) - sections: List[Any] = field(default_factory=list) - section_signal_pulses: List[SectionSignalPulse] = field(default_factory=list) - global_leader_device: Optional[DeviceInfo] = None - pulse_defs: List[PulseDef] = field(default_factory=list) + signals: list[SignalInfo] = field(default_factory=list) + sections: list[SectionInfo] = field(default_factory=list) + section_signal_pulses: list[SectionSignalPulse] = field( + default_factory=list + ) # todo: remove + global_leader_device: DeviceInfo | None = None # todo: remove + pulse_defs: list[PulseDef] = field(default_factory=list) @dataclass diff --git a/laboneq/data/execution_payload/__init__.py b/laboneq/data/execution_payload/__init__.py index 6a44eff..f1cc396 100644 --- a/laboneq/data/execution_payload/__init__.py +++ b/laboneq/data/execution_payload/__init__.py @@ -1,34 +1,28 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 - -# __init__.py of 'execution_payload' package - autogenerated, do not edit from __future__ import annotations from dataclasses import dataclass, field from enum import Enum, auto -from typing import Any, Dict, List from numpy.typing import ArrayLike +from laboneq.data import EnumReprMixin from 
laboneq.data.scheduled_experiment import ScheduledExperiment -from laboneq.data.setup_description import Connection # # Enums # -class ServerType(Enum): +class ServerType(EnumReprMixin, Enum): DATA_SERVER = auto() WEB_SERVER = auto() SCOPE_SERVER = auto() POWER_SWITCH_SERVER = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class TargetDeviceType(Enum): +class TargetDeviceType(EnumReprMixin, Enum): UHFQA = auto() HDAWG = auto() SHFQA = auto() @@ -37,41 +31,10 @@ class TargetDeviceType(Enum): PQSC = auto() NONQC = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - - -class NearTimeOperationType(Enum): - CALL = auto() - ACQUIRE = auto() - SET = auto() - PLAY_PULSE = auto() - DELAY = auto() - RESERVE = auto() - ACQUIRE_LOOP_RT = auto() - ACQUIRE_LOOP_NT = auto() - NO_OPERATION = auto() - SET_SOFTWARE_PARM = auto() - FOR_LOOP = auto() - - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - - -class LoopType(Enum): - SWEEP = auto() - AVERAGE = auto() - HARDWARE = auto() - - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - # # Data Classes # - - @dataclass class TargetServer: uid: str = None @@ -81,32 +44,7 @@ class TargetServer: api_level: int = None -@dataclass -class InitializationConfiguration: - reference_clock: float = None - triggering_mode: str = None - - -@dataclass -class JobParameter: - uid: str = None - values: List[Any] = field(default_factory=list) - axis_name: str = None - - -@dataclass -class NtStepKey: - indices: List[int] = field(default_factory=list) - - -@dataclass -class SourceCode: - uid: str = None - file_name: str = None - source_text: str = None - - -class TargetChannelType(Enum): +class TargetChannelType(EnumReprMixin, Enum): UNKNOWN = auto() IQ = auto() RF = auto() @@ -136,29 +74,10 @@ class TargetDevice: @dataclass -class Initialization: - uid: str = None - device: TargetDevice = None - source_code: SourceCode = None - config: 
InitializationConfiguration = None - - -@dataclass -class NearTimeOperation: +class SourceCode: uid: str = None - operation_type: NearTimeOperationType = None - children: List[Any] = field(default_factory=list) - args: Dict[str, Any] = field(default_factory=dict) - parameters: List[JobParameter] = field(default_factory=list) - - -@dataclass -class RealTimeExecutionInit: - device: TargetDevice = None - awg_id: int = None - seqc: SourceCode = None - wave_indices_ref: str = None - nt_step: NtStepKey = None + file_name: str = None # TODO(2K): This field currently acts as the uid, not requiring a separate file name if the uid is explicit. + source_text: str = None @dataclass @@ -169,26 +88,11 @@ class WaveForm: samples: ArrayLike = None -@dataclass -class NearTimeProgram: - uid: str = None - children: List[NearTimeOperation] = field(default_factory=list) - - -@dataclass -class Recipe: - uid: str = None - initializations: List[Initialization] = field(default_factory=list) - realtime_execution_init: List[RealTimeExecutionInit] = field(default_factory=list) - waveforms: List[WaveForm] = field(default_factory=list) - measurement_map: Dict[str, str] = field(default_factory=dict) - - @dataclass class TargetSetup: uid: str = None - servers: List[TargetServer] = field(default_factory=list) - devices: List[TargetDevice] = field(default_factory=list) + servers: list[TargetServer] = field(default_factory=list) + devices: list[TargetDevice] = field(default_factory=list) @dataclass @@ -198,7 +102,7 @@ class ExecutionPayload: compiled_experiment_hash: str = None experiment_hash: str = None device_setup_hash: str = None - src: List[SourceCode] = field(default_factory=list) - recipe: Recipe = None - near_time_program: NearTimeProgram = None + src: list[SourceCode] = field( + default_factory=list + ) # TODO(2K): Controller uses scheduled_experiment.src scheduled_experiment: ScheduledExperiment | None = None diff --git a/laboneq/data/execution_payload/execution_payload_helper.py 
b/laboneq/data/execution_payload/execution_payload_helper.py deleted file mode 100644 index 8c3002f..0000000 --- a/laboneq/data/execution_payload/execution_payload_helper.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2023 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - -from laboneq.core.serialization.simple_serialization import serialize_to_dict -from laboneq.data.execution_payload import NearTimeProgram - - -class ExecutionPayloadHelper: - @staticmethod - def dump_near_time_program(near_time_program: NearTimeProgram): - return serialize_to_dict(near_time_program) - - @staticmethod - def descend(current_node, visitor, context, parent): - for c in current_node.children: - ExecutionPayloadHelper.descend(c, visitor, context, current_node) - visitor(current_node, context, parent) - - @staticmethod - def accept_near_time_program_visitor( - near_time_program: NearTimeProgram, visitor, context=None - ): - ExecutionPayloadHelper.descend(near_time_program, visitor, context, None) diff --git a/laboneq/data/experiment_description/__init__.py b/laboneq/data/experiment_description/__init__.py index 15b3e62..40fa3ab 100644 --- a/laboneq/data/experiment_description/__init__.py +++ b/laboneq/data/experiment_description/__init__.py @@ -2,85 +2,51 @@ # SPDX-License-Identifier: Apache-2.0 -# __init__.py of 'experiment_description' package - autogenerated, do not edit from __future__ import annotations from dataclasses import dataclass, field from enum import Enum, auto -from numbers import Number -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional from numpy.typing import ArrayLike +from laboneq.core.types.enums.acquisition_type import AcquisitionType +from laboneq.data import EnumReprMixin +from laboneq.data.calibration import SignalCalibration +from laboneq.data.parameter import Parameter # # Enums # -class AcquisitionType(Enum): - DISCRIMINATION = auto() - INTEGRATION = auto() - RAW = auto() - SPECTROSCOPY = auto() - 
def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class AveragingMode(Enum): +class AveragingMode(EnumReprMixin, Enum): CYCLIC = auto() SEQUENTIAL = auto() SINGLE_SHOT = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class ExecutionType(Enum): +class ExecutionType(EnumReprMixin, Enum): NEAR_TIME = auto() REAL_TIME = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class RepetitionMode(Enum): +class RepetitionMode(EnumReprMixin, Enum): AUTO = auto() CONSTANT = auto() FASTEST = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class SectionAlignment(Enum): +class SectionAlignment(EnumReprMixin, Enum): LEFT = auto() RIGHT = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - # # Data Classes # -@dataclass -class SignalCalibration: - uid: str = None - oscillator: Optional[Any] = None - local_oscillator: Optional[Any] = None - mixer_calibration: Optional[Any] = None - precompensation: Optional[Any] = None - port_delay: Optional[Any] = None - port_mode: Optional[Any] = None - delay_signal: Optional[Any] = None - voltage_offset: Optional[Any] = None - range: Optional[Any] = None - threshold: Optional[Any] = None - amplitude: Optional[Any] = None - - @dataclass class Operation: """Operation.""" @@ -99,29 +65,25 @@ class SignalOperation(Operation): @dataclass class ExperimentSignal: uid: str = None - calibration: Optional[Any] = None - - -@dataclass -class Parameter: - uid: str = None + calibration: Optional[SignalCalibration] = None @dataclass class Pulse: uid: str = None + can_compress: bool = False @dataclass class Section: uid: str = None alignment: SectionAlignment = None - execution_type: Optional[ExecutionType] = None - length: Optional[float] = None - play_after: List[str] = field(default_factory=list) - children: List[Operation] = field(default_factory=list) - trigger: Dict = field(default_factory=dict) - 
on_system_grid: Optional[bool] = None + execution_type: ExecutionType | None = None + length: float | None = None + play_after: list[str | Section] = field(default_factory=list) + children: list[Operation | Section] = field(default_factory=list) + trigger: dict = field(default_factory=dict) + on_system_grid: bool | None = None @dataclass @@ -143,7 +105,7 @@ class AcquireLoopNt(Section): @dataclass class AcquireLoopRt(Section): uid: str = None - acquisition_type: AcquisitionType = None + acquisition_type: AcquisitionType = AcquisitionType.INTEGRATION averaging_mode: AveragingMode = None count: int = None execution_type: ExecutionType = None @@ -173,59 +135,48 @@ class Delay(SignalOperation): @dataclass class Experiment: uid: str = None - signals: Union[Dict[str, ExperimentSignal], List[ExperimentSignal]] = None + signals: List[ExperimentSignal] = field(default_factory=list) epsilon: float = None sections: List[Section] = field(default_factory=list) pulses: List[Pulse] = field(default_factory=list) -@dataclass -class LinearSweepParameter(Parameter): - uid: str = None - start: Number = None - stop: Number = None - count: int = None - axis_name: str = None - - @dataclass class Match(Section): uid: str = None - handle: str = None - local: bool = None + handle: Optional[str] = None + user_register: Optional[int] = None + local: Optional[bool] = None @dataclass class PlayPulse(SignalOperation): pulse: Pulse = None - amplitude: Union[float, complex, Parameter] = None - increment_oscillator_phase: Parameter = None - phase: float = None - set_oscillator_phase: float = None - length: Parameter = None - pulse_parameters: Optional[Dict] = None - precompensation_clear: Optional[bool] = None - marker: Optional[Dict] = None + amplitude: float | complex | Parameter = None + increment_oscillator_phase: float | Parameter = None + phase: float | Parameter = None + set_oscillator_phase: float | Parameter = None + length: float | Parameter = None + pulse_parameters: dict | None = None + 
precompensation_clear: bool | None = None + marker: dict | None = None @dataclass class PulseFunctional(Pulse): - uid: str = None function: str = None - amplitude: float = None + amplitude: float | Parameter = None length: float = None - pulse_parameters: Optional[Dict] = None + pulse_parameters: dict | None = None @dataclass class PulseSampledComplex(Pulse): - uid: str = None samples: ArrayLike = None @dataclass class PulseSampledReal(Pulse): - uid: str = None samples: ArrayLike = None @@ -236,22 +187,12 @@ class Reserve(SignalOperation): @dataclass class Set(Operation): - uid: str = None path: str = None - key: str = None value: Any = None @dataclass class Sweep(Section): - uid: str = None parameters: List[Parameter] = field(default_factory=list) reset_oscillator_phase: bool = None execution_type: ExecutionType = None - - -@dataclass -class SweepParameter(Parameter): - uid: str = None - values: ArrayLike = None - axis_name: str = None diff --git a/laboneq/data/experiment_description/experiment_helper.py b/laboneq/data/experiment_description/experiment_helper.py index d19d200..590ef36 100644 --- a/laboneq/data/experiment_description/experiment_helper.py +++ b/laboneq/data/experiment_description/experiment_helper.py @@ -3,7 +3,7 @@ import numpy as np -from laboneq.data.experiment_description import Parameter +from laboneq.data.parameter import Parameter class ExperimentHelper: diff --git a/laboneq/data/experiment_results/__init__.py b/laboneq/data/experiment_results/__init__.py index d6f8d93..e236eaa 100644 --- a/laboneq/data/experiment_results/__init__.py +++ b/laboneq/data/experiment_results/__init__.py @@ -2,37 +2,67 @@ # SPDX-License-Identifier: Apache-2.0 -# __init__.py of 'experiment_results' package - autogenerated, do not edit from __future__ import annotations from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional +from typing import Any from numpy.typing import ArrayLike -# -# Enums -# - -# -# Data Classes -# +from 
laboneq.core.validators import dicts_equal @dataclass class AcquiredResult: - data: ArrayLike = None - axis_name: List = field(default_factory=list) - axis: Optional[Any] = None - last_nt_step: List[int] = field(default_factory=list) + """ + This class represents the results acquired for an 'acquire' event. + + The acquired result is a triple consisting of actual data, axis name(s) + and one or more axes + """ + + #: A multidimensional numpy array, where each dimension corresponds to a sweep loop + #: nesting level, the outermost sweep being the first dimension. + data: ArrayLike | None = None + + #: A list of axis names. Each element may be either a string or a list of strings. + axis_name: list[str | list[str]] = field(default_factory=list) + + #: A list of axis grids. Each element may be either a 1D numpy array or a list of + #: such arrays. + axis: list[ArrayLike | list[ArrayLike]] = field(default_factory=list) + + #: A list of axis indices that represent the last measured near-time point. Only + #: covers outer near-time dimensions. + last_nt_step: list[int] | None = None + + def __eq__(self, other: AcquiredResult): + return ( + dicts_equal(self.data, other.data) + and self.axis_name == other.axis_name + and dicts_equal(self.axis, other.axis) + and self.last_nt_step == other.last_nt_step + ) @dataclass class ExperimentResults: uid: str = None - acquired_results: Dict[str, AcquiredResult] = field(default_factory=dict) - user_func_results: Dict = field(default_factory=dict) - execution_errors: List = field(default_factory=list) + + #: The acquired results, organized by handle. + acquired_results: dict[str, AcquiredResult] = field(default_factory=dict) + + #: List of the results of each user function, by name of the function. + user_func_results: dict[str, list[Any]] = field(default_factory=dict) + + #: Any exceptions that occurred during the execution of the experiment. 
Entries are + #: tuples of + #: + #: * the indices of the loops where the error occurred, + #: * the section uid, + #: * the error message. + execution_errors: list[tuple[list[int], str, str]] = field(default_factory=list) + experiment_hash: str = None compiled_experiment_hash: str = None execution_payload_hash: str = None diff --git a/laboneq/data/parameter/__init__.py b/laboneq/data/parameter/__init__.py new file mode 100644 index 0000000..aae7b7a --- /dev/null +++ b/laboneq/data/parameter/__init__.py @@ -0,0 +1,28 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +from dataclasses import dataclass + +from numpy.typing import ArrayLike + + +@dataclass +class Parameter: + uid: str = None + + +@dataclass +class LinearSweepParameter(Parameter): + uid: str = None + start: float | complex = None + stop: float | complex = None + count: int = None + axis_name: str = None + + +@dataclass +class SweepParameter(Parameter): + values: ArrayLike = None + axis_name: str = None diff --git a/laboneq/data/recipe.py b/laboneq/data/recipe.py new file mode 100644 index 0000000..713960a --- /dev/null +++ b/laboneq/data/recipe.py @@ -0,0 +1,142 @@ +# Copyright 2019 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import Enum +from typing import Any + + +class SignalType(Enum): + IQ = "iq" + SINGLE = "single" + INTEGRATION = "integration" + MARKER = "marker" + + +class RefClkType(Enum): + _10MHZ = 10 + _100MHZ = 100 + + +class TriggeringMode(Enum): + ZSYNC_FOLLOWER = 1 + DIO_FOLLOWER = 2 + DESKTOP_LEADER = 3 + DESKTOP_DIO_FOLLOWER = 4 + INTERNAL_FOLLOWER = 5 + + +@dataclass(frozen=True) +class NtStepKey: + indices: tuple[int] + + +@dataclass +class Gains: + diagonal: float + off_diagonal: float + + +@dataclass +class IO: + channel: int + enable: bool | None = None + modulation: bool | None = None + oscillator: 
int | None = None + oscillator_frequency: int | None = None + offset: float | None = None + gains: Gains | None = None + range: float | None = None + range_unit: str | None = None + precompensation: dict[str, dict] | None = None + lo_frequency: Any | None = None + port_mode: str | None = None + port_delay: Any | None = None + scheduler_port_delay: float = 0.0 + delay_signal: float | None = None + marker_mode: str | None = None + amplitude: Any | None = None + + +@dataclass +class AWG: + awg: int + signal_type: SignalType = SignalType.SINGLE + qa_signal_id: str | None = None + command_table_match_offset: int | None = None + feedback_register: int | None = None + + +@dataclass +class Measurement: + length: int + channel: int = 0 + + +@dataclass +class Config: + repetitions: int = 1 + reference_clock: RefClkType = None + holdoff: float = 0 + triggering_mode: TriggeringMode = TriggeringMode.DIO_FOLLOWER + sampling_rate: float | None = None + + +@dataclass +class Initialization: + device_uid: str + config: Config = field(default_factory=Config) + awgs: list[AWG] = None + outputs: list[IO] = None + inputs: list[IO] = None + measurements: list[Measurement] = field(default_factory=list) + ppchannels: list[dict[str, Any]] | None = None + + +@dataclass +class OscillatorParam: + id: str + device_id: str + channel: int + frequency: float = None + param: str = None + + +@dataclass +class IntegratorAllocation: + signal_id: str + device_id: str + awg: int + channels: list[int] + weights: str = None + threshold: float = 0.0 + + +@dataclass +class AcquireLength: + section_id: str + signal_id: str + acquire_length: int + + +@dataclass +class RealtimeExecutionInit: + device_id: str + awg_id: int + seqc_ref: str + wave_indices_ref: str + nt_step: NtStepKey + + +@dataclass +class Recipe: + initializations: list[Initialization] = field(default_factory=list) + realtime_execution_init: list[RealtimeExecutionInit] = field(default_factory=list) + oscillator_params: list[OscillatorParam] = 
field(default_factory=list) + integrator_allocations: list[IntegratorAllocation] = field(default_factory=list) + acquire_lengths: list[AcquireLength] = field(default_factory=list) + simultaneous_acquires: list[dict[str, str]] = field(default_factory=list) + total_execution_time: float = None + max_step_execution_time: float = None diff --git a/laboneq/data/scheduled_experiment.py b/laboneq/data/scheduled_experiment.py index fb2bce4..999473f 100644 --- a/laboneq/data/scheduled_experiment.py +++ b/laboneq/data/scheduled_experiment.py @@ -8,21 +8,20 @@ from typing import Any from laboneq.core.validators import dicts_equal +from laboneq.data import EnumReprMixin +from laboneq.data.recipe import Recipe # # Enums # -class MixerType(Enum): +class MixerType(EnumReprMixin, Enum): #: Mixer performs full complex modulation IQ = auto() #: Mixer only performs envelope modulation (UHFQA-style) UHFQA_ENVELOPE = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - # # Data Classes @@ -73,7 +72,7 @@ class ScheduledExperiment: uid: str = None #: Instructions to the controller for running the experiment. - recipe: dict[str, Any] = None + recipe: Recipe = None #: The seqC source code, per device. 
src: list[dict[str, str]] = None diff --git a/laboneq/data/setup_description/__init__.py b/laboneq/data/setup_description/__init__.py index 960cd78..2793fbc 100644 --- a/laboneq/data/setup_description/__init__.py +++ b/laboneq/data/setup_description/__init__.py @@ -1,57 +1,30 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 - -# __init__.py of 'setup_description' package - autogenerated, do not edit from __future__ import annotations from dataclasses import dataclass, field from enum import Enum, auto -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List + +from laboneq.data import EnumReprMixin +from laboneq.data.calibration import Calibration # # Enums # -class IODirection(Enum): +class IODirection(EnumReprMixin, Enum): IN = auto() OUT = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - - -class IOSignalType(Enum): - DIO = auto() - I = auto() - IQ = auto() - LO = auto() - Q = auto() - RF = auto() - SINGLE = auto() - ZSYNC = auto() - - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - - -class PortMode(Enum): - LF = auto() - RF = auto() - - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class ReferenceClockSource(Enum): +class ReferenceClockSource(EnumReprMixin, Enum): EXTERNAL = auto() INTERNAL = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class DeviceType(Enum): +class DeviceType(EnumReprMixin, Enum): HDAWG = auto() NonQC = auto() PQSC = auto() @@ -60,16 +33,16 @@ class DeviceType(Enum): UHFQA = auto() SHFQC = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" - -class PhysicalChannelType(Enum): +class PhysicalChannelType(EnumReprMixin, Enum): IQ_CHANNEL = auto() RF_CHANNEL = auto() - def __repr__(self): - return f"{self.__class__.__name__}.{self.name}" + +class PortType(EnumReprMixin, Enum): + RF = "RF" + DIO = "DIO" + ZSYNC = "ZSYNC" # @@ -79,28 +52,36 @@ def 
__repr__(self): @dataclass class LogicalSignal: - uid: str = None - name: str = None - path: str = None - direction: IODirection = None + name: str + group: str # Needed for referencing. TODO(MH): Remove @dataclass class PhysicalChannel: - uid: str = None + name: str type: PhysicalChannelType = None + direction: IODirection = None + ports: List[Port] = None @dataclass -class Connection: - physical_channel: PhysicalChannel = None - logical_signal: LogicalSignal = None +class ChannelMapEntry: + """A mapping between physical and logical signal.""" + + physical_channel: PhysicalChannel + logical_signal: LogicalSignal + + +@dataclass +class ReferenceClock: + source: ReferenceClockSource = ReferenceClockSource.EXTERNAL + frequency: float | None = None @dataclass class Port: - path: str = None - physical_channel: Optional[PhysicalChannel] = None + path: str + type: PortType @dataclass @@ -109,16 +90,17 @@ class Server: api_level: int = None host: str = None leader_uid: str = None - port: Any = None + port: int = None @dataclass class Instrument: uid: str = None interface: str = None + reference_clock: ReferenceClock = field(default_factory=ReferenceClock) ports: List[Port] = field(default_factory=list) physical_channels: List[PhysicalChannel] = field(default_factory=list) - connections: List[Connection] = field(default_factory=list) + connections: List[ChannelMapEntry] = field(default_factory=list) address: str = None device_type: DeviceType = None server: Server = None @@ -130,24 +112,19 @@ class LogicalSignalGroup: logical_signals: Dict[str, LogicalSignal] = field(default_factory=dict) -@dataclass -class QuantumElement: - uid: str = None - signals: List[LogicalSignal] = field(default_factory=list) - parameters: List = field(default_factory=list) - - @dataclass class SetupInternalConnection: - from_instrument: Instrument = None - from_port: Port = None - to_instrument: Instrument = None + from_instrument: Instrument + from_port: Port + to_instrument: Instrument + to_port: Port @dataclass 
-class PhysicalChannelToLogicalSignalMapping: - physical_channel: PhysicalChannel = None - logical_signal: LogicalSignal = None +class QuantumElement: + uid: str = None + signals: List[LogicalSignal] = field(default_factory=list) + parameters: List = field(default_factory=list) @dataclass @@ -164,4 +141,4 @@ class Setup: setup_internal_connections: List[SetupInternalConnection] = field( default_factory=list ) - calibration: Any = None + calibration: Calibration = None diff --git a/laboneq/data/setup_description/setup_helper.py b/laboneq/data/setup_description/setup_helper.py index 4e6953d..d1b53d6 100644 --- a/laboneq/data/setup_description/setup_helper.py +++ b/laboneq/data/setup_description/setup_helper.py @@ -15,15 +15,10 @@ class SetupHelper: @classmethod def get_instrument_of_logical_signal(cls, setup: Setup, logical_signal_path: str): + grp, name = logical_signal_path.split("/") for i in setup.instruments: for c in i.connections: - search_path = logical_signal_path - # todo: this is a hack to make the path comparison work - # to fix this, make sure paths are generated correctly - if not search_path.startswith("/logical_signal_groups/"): - search_path = "/logical_signal_groups/" + search_path - - if c.logical_signal.path == search_path: + if grp == c.logical_signal.group and name == c.logical_signal.name: return i raise Exception("No instrument found for logical signal " + logical_signal_path) @@ -35,30 +30,26 @@ def get_flat_logcial_signals(cls, setup: Setup): return logical_signals @classmethod - def get_connections_of_logical_signal(cls, setup: Setup, logical_signal_path: str): + def get_ports_of_logical_signal(cls, setup: Setup, logical_signal_path): instrument = cls.get_instrument_of_logical_signal(setup, logical_signal_path) - connections = [] + ports = [] + grp, name = logical_signal_path.split("/") for c in instrument.connections: - if c.logical_signal.path == "/logical_signal_groups/" + logical_signal_path: - connections.append(c) - return connections 
+ if c.logical_signal.name == name and grp == c.logical_signal.group: + for ch_port in c.physical_channel.ports: + if ch_port not in ports: + ports.append(ch_port) + return ports @classmethod - def get_ports_of_logical_signal(cls, setup: Setup, logical_signal_path): + def get_connections_of_logical_signal(cls, setup: Setup, logical_signal_path: str): instrument = cls.get_instrument_of_logical_signal(setup, logical_signal_path) - ports = [] - physical_channels = [] - search_path = logical_signal_path - if not search_path.startswith("/logical_signal_groups/"): - search_path = "/logical_signal_groups/" + search_path - + grp, name = logical_signal_path.split("/") + connections = [] for c in instrument.connections: - if c.logical_signal.path == search_path: - physical_channels.append(c.physical_channel) - for p in instrument.ports: - if p.physical_channel in physical_channels: - ports.append(p) - return ports + if c.logical_signal.name == name and grp == c.logical_signal.group: + connections.append(c) + return connections @classmethod def flat_logical_signals(cls, setup: Setup): diff --git a/laboneq/dsl/device/instruments/nonqc.py b/laboneq/dsl/device/instruments/nonqc.py index 9be4475..888eee6 100644 --- a/laboneq/dsl/device/instruments/nonqc.py +++ b/laboneq/dsl/device/instruments/nonqc.py @@ -10,7 +10,8 @@ @classformatter @dataclass(init=True, repr=True, order=True) class NonQC(ZIStandardInstrument): - """Class representing a ZI instrument that is of type not directly handled by L1Q.""" + """Class representing a ZI instrument that is of type not directly handled by + LabOne Q.""" dev_type: str = None diff --git a/laboneq/dsl/experiment/builtins.py b/laboneq/dsl/experiment/builtins.py new file mode 100644 index 0000000..dd655f3 --- /dev/null +++ b/laboneq/dsl/experiment/builtins.py @@ -0,0 +1,263 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import typing +from functools import wraps +from 
typing import Any + +import numpy as np + +from laboneq.core.exceptions import LabOneQException +from laboneq.core.types.enums import AcquisitionType, AveragingMode, RepetitionMode +from laboneq.dsl import Parameter +from laboneq.dsl.calibration import Calibration +from laboneq.dsl.device.io_units import LogicalSignal +from laboneq.dsl.experiment.experiment_context import ( + ExperimentContext, + current_experiment_context, +) +from laboneq.dsl.experiment.pulse import Pulse + +if typing.TYPE_CHECKING: + from laboneq.dsl.experiment import Experiment, Section + +__all__ = [ + "acquire", + "acquire_loop_rt", + "add", + "call", + "case", + "delay", + "experiment_calibration", + "experiment", + "for_each", + "map_signal", + "match", + "measure", + "play", + "qubit_experiment", + "reserve", + "section", + "set_node", + "sweep", + "sweep_range", +] + +from laboneq.dsl.experiment.section_context import ( + AcquireLoopNtSectionContext, + AcquireLoopRtSectionContext, + CaseSectionContext, + MatchSectionContext, + SectionSectionContext, + SweepSectionContext, + active_section, +) + + +def _active_experiment() -> Experiment: + context = current_experiment_context() + if not context: + raise LabOneQException("Not in an experiment context") + return context.experiment + + +def section(*args, **kwargs): + return SectionSectionContext(*args, **kwargs) + + +def sweep(*args, parameter=None, **kwargs): + parameters = parameter if isinstance(parameter, list) else [parameter] + return SweepSectionContext(*args, parameters=parameters, **kwargs) + + +def acquire_loop_rt( + count, + averaging_mode=AveragingMode.CYCLIC, + repetition_mode=RepetitionMode.FASTEST, + repetition_time=None, + acquisition_type=AcquisitionType.INTEGRATION, + uid=None, + reset_oscillator_phase=False, +): + return AcquireLoopRtSectionContext( + count=count, + averaging_mode=averaging_mode, + repetition_mode=repetition_mode, + repetition_time=repetition_time, + acquisition_type=acquisition_type, + uid=uid, + 
reset_oscillator_phase=reset_oscillator_phase, + ) + + +def acquire_loop_nt(*args, **kwargs): + return AcquireLoopNtSectionContext(*args, **kwargs) + + +def match( + handle: str | None = None, + user_register: int | None = None, + uid: str = None, + play_after: str | list[str] | None = None, +): + return MatchSectionContext( + handle=handle, user_register=user_register, uid=uid, play_after=play_after + ) + + +def case(state: int, uid: str = None): + return CaseSectionContext(state=state, uid=uid) + + +def call(funcname, **kwargs): + return active_section().call(funcname, **kwargs) + + +def play( + signal, + pulse, + amplitude=None, + phase=None, + increment_oscillator_phase=None, + set_oscillator_phase=None, + length=None, + pulse_parameters: dict[str, Any] | None = None, + precompensation_clear: bool | None = None, + marker=None, +): + return active_section().play( + signal=signal, + pulse=pulse, + amplitude=amplitude, + phase=phase, + increment_oscillator_phase=increment_oscillator_phase, + set_oscillator_phase=set_oscillator_phase, + length=length, + pulse_parameters=pulse_parameters, + precompensation_clear=precompensation_clear, + marker=marker, + ) + + +def delay( + signal: str, + time: float | Parameter, + precompensation_clear: bool | None = None, +): + return active_section().delay( + signal=signal, time=time, precompensation_clear=precompensation_clear + ) + + +def reserve(signal): + return active_section().reserve(signal) + + +def acquire( + signal: str, + handle: str, + kernel: Pulse = None, + length: float = None, + pulse_parameters: dict[str, Any] | None = None, +): + return active_section().acquire( + signal=signal, + handle=handle, + kernel=kernel, + length=length, + pulse_parameters=pulse_parameters, + ) + + +def measure( + acquire_signal: str, + handle: str, + integration_kernel: Pulse | None = None, + integration_kernel_parameters: dict[str, Any] | None = None, + integration_length: float | None = None, + measure_signal: str | None = None, + 
measure_pulse: Pulse | None = None, + measure_pulse_length: float | None = None, + measure_pulse_parameters: dict[str, Any] | None = None, + measure_pulse_amplitude: float | None = None, + acquire_delay: float | None = None, + reset_delay: float | None = None, +): + return active_section().measure( + acquire_signal=acquire_signal, + handle=handle, + integration_kernel=integration_kernel, + integration_kernel_parameters=integration_kernel_parameters, + integration_length=integration_length, + measure_signal=measure_signal, + measure_pulse=measure_pulse, + measure_pulse_length=measure_pulse_length, + measure_pulse_parameters=measure_pulse_parameters, + measure_pulse_amplitude=measure_pulse_amplitude, + acquire_delay=acquire_delay, + reset_delay=reset_delay, + ) + + +def add(section: Section): + try: + parent = active_section() + except LabOneQException: + parent = _active_experiment() + parent.add(section) + + +def set_node(path: str, value: Any): + return active_section().set(path=path, value=value) + + +def sweep_range(start, stop, count, uid=None, axis_name=None, **kwargs): + from laboneq.dsl import LinearSweepParameter + + param = LinearSweepParameter( + start=start, stop=stop, count=count, axis_name=axis_name + ) + return sweep(uid=uid or axis_name, parameter=param, **kwargs) + + +def for_each(iterable, uid=None, axis_name=None, **kwargs): + from laboneq.dsl import SweepParameter + + param = SweepParameter(values=np.array(iterable), axis_name=axis_name) + return sweep(uid=uid or axis_name, parameter=param, **kwargs) + + +def experiment(uid=None, signals=None): + return ExperimentContext(uid=uid, signals=signals) + + +def qubit_experiment(qubits: list, **kwargs): + def decorator(f): + @wraps(f) + def wrapper(*inner_args, **inner_kwargs): + context = ExperimentContext( + uid=f.__name__, + signals=[s for q in qubits for s in q.experiment_signals()], + ) + with context: + f(*inner_args, **inner_kwargs) + return context.experiment + + return wrapper + + return 
decorator + + +def experiment_calibration(): + """Get the calibration of the experiment in construction""" + context = current_experiment_context() + if context is None: + raise LabOneQException("Not in an experiment context") + context.calibration = context.calibration or Calibration() + return context.calibration + + +def map_signal(experiment_signal_uid: str, logical_signal: LogicalSignal): + _active_experiment().map_signal(experiment_signal_uid, logical_signal) diff --git a/laboneq/dsl/experiment/context.py b/laboneq/dsl/experiment/context.py new file mode 100644 index 0000000..4ab1fee --- /dev/null +++ b/laboneq/dsl/experiment/context.py @@ -0,0 +1,49 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import abc +import threading + + +class Context(abc.ABC): + @abc.abstractmethod + def __enter__(self): + raise NotImplementedError + + @abc.abstractmethod + def __exit__(self, exc_type, exc_val, exc_tb): + raise NotImplementedError + + @abc.abstractmethod + def __call__(self, f): + raise NotImplementedError + + @abc.abstractmethod + def add(self, section): + raise NotImplementedError + + +_store = threading.local() +_store.active_contexts = [] + + +def push_context(context): + _store.active_contexts.append(context) + + +def peek_context(): + return _store.active_contexts[-1] if len(_store.active_contexts) else None + + +def pop_context(): + return _store.active_contexts.pop() + + +def iter_contexts(): + return iter(_store.active_contexts) + + +def current_context() -> Context | None: + return _store.active_contexts[-1] if len(_store.active_contexts) else None diff --git a/laboneq/dsl/experiment/experiment.py b/laboneq/dsl/experiment/experiment.py index 04f6173..7ac2e0e 100644 --- a/laboneq/dsl/experiment/experiment.py +++ b/laboneq/dsl/experiment/experiment.py @@ -3,6 +3,7 @@ from __future__ import annotations +import threading from collections import deque from dataclasses import dataclass, 
field from typing import TYPE_CHECKING, Any, Deque, Dict, List, Optional, Union @@ -540,7 +541,9 @@ def __init__( def __enter__(self): self.exp._push_section(self.sweep) - return self.sweep + if len(self.sweep.parameters) == 1: + return self.sweep.parameters[0] + return tuple(self.sweep.parameters) def __exit__(self, exc_type, exc_val, exc_tb): self.exp._pop_and_add_section() @@ -669,7 +672,7 @@ def section( alignment=None, uid=None, on_system_grid=None, - play_after: Optional[Union[str, List[str]]] = None, + play_after: Optional[Union[str, Section, List[Union[str, Section]]]] = None, trigger: Optional[Dict[str, Dict[str, int]]] = None, ): """Define an section for scoping operations. @@ -694,7 +697,7 @@ def section( alignment: Alignment of the operations in the section. Defaults to :class:`~.SectionAlignment.LEFT`. play_after: Play this section after the end of the section(s) with the - given ID(s) (single string or list of strings). Defaults to None. + given ID(s). Defaults to None. trigger: Play a pulse a trigger pulse for the duration of this section. See below for details. on_system_grid: If True, the section boundaries are always rounded to the @@ -830,7 +833,7 @@ def match_local( self, handle: str, uid: str = None, - play_after: str | list[str] | None = None, + play_after: Optional[Union[str, Section, List[Union[str, Section]]]] = None, ): """Define a section which switches between different child sections based on a QA measurement on an SHFQC. @@ -847,18 +850,23 @@ def match_local( handle: A unique identifier string that allows to retrieve the acquired data. play_after: Play this section after the end of the section(s) with the - given ID(s) (single string or list of strings). Defaults to None. + given ID(s). Defaults to None. 
""" return Experiment._MatchSectionContext( - self, uid=uid, handle=handle, play_after=play_after, local=True + self, + uid=uid, + handle=handle, + user_register=None, + play_after=play_after, + local=True, ) def match_global( self, handle: str, uid: str = None, - play_after: str | list[str] | None = None, + play_after: Optional[Union[str, Section, List[Union[str, Section]]]] = None, ): """Define a section which switches between different child sections based on a QA measurement via the PQSC. @@ -875,11 +883,16 @@ def match_global( handle: A unique identifier string that allows to retrieve the acquired data. play_after: Play this section after the end of the section(s) with the - given ID(s) (single string or list of strings). Defaults to None. + given ID(s). Defaults to None. """ return Experiment._MatchSectionContext( - self, uid=uid, handle=handle, play_after=play_after, local=False + self, + uid=uid, + handle=handle, + user_register=None, + play_after=play_after, + local=False, ) class _MatchSectionContext: @@ -888,6 +901,7 @@ def __init__( experiment, uid, handle, + user_register, local, play_after=None, ): @@ -898,6 +912,7 @@ def __init__( if play_after is not None: args["play_after"] = play_after args["local"] = local + args["user_register"] = user_register self.section = Match(**args) @@ -910,32 +925,45 @@ def __exit__(self, exc_type, exc_val, exc_tb): def match( self, - handle: str, + handle: Optional[str] = None, + user_register: Optional[int] = None, uid: str = None, - play_after: str | list[str] | None = None, + play_after: Optional[Union[str, Section, List[Union[str, Section]]]] = None, ): """Define a section which switches between different child sections based - on a QA measurement. + on a QA measurement (using ``handle``) or a user register (using ``user_register``). - The feedback path (local, or global, via PQSC) is chosen automatically. 
+ In case of the QA measurement option, the feedback path (local, or global, + via PQSC) is chosen automatically. Match needs to open a scope in the following way:: with exp.match(...): # here come the different branches to be selected - :note: Only subsections of type ``Case`` are allowed. + :note: + Only subsections of type ``Case`` are allowed. Exactly one of ``handle`` or + ``user_register`` must be specified, the other one must be None. The user register + is evaluated only at the beginning of the experiment, not during the experiment, + and only a few user registers per AWG can be used due to the limited number of + processor registers. Args: uid: The unique ID for this section. handle: A unique identifier string that allows to retrieve the acquired data. + user_register: The user register to use for the match. play_after: Play this section after the end of the section(s) with the - given ID(s) (single string or list of strings). Defaults to None. + given ID(s). Defaults to None. """ return Experiment._MatchSectionContext( - self, uid=uid, handle=handle, play_after=play_after, local=None + self, + uid=uid, + handle=handle, + user_register=user_register, + play_after=play_after, + local=None, ) def case(self, state: int, uid: str = None): @@ -1021,3 +1049,31 @@ def all_sections(self): for s in self.sections: retval.extend(Experiment._all_subsections(s)) return retval + + +_store = threading.local() +_store.active_contexts = [] + + +class ExperimentContext: + def __init__(self, experiment: Experiment): + self.experiment = experiment + self.calibration = None + + def __enter__(self): + _store.active_contexts.append(self) + return self.experiment + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.calibration is not None: + self.experiment.set_calibration(self.calibration) + _store.active_contexts.pop() + + +def current_context() -> ExperimentContext: + try: + return _store.active_contexts[-1] + except IndexError as e: + raise LabOneQException( + "Not in 
an experiment context. Use '@experiment' to create an experiment scope first." + ) from e diff --git a/laboneq/dsl/experiment/experiment_context.py b/laboneq/dsl/experiment/experiment_context.py new file mode 100644 index 0000000..7d0ec36 --- /dev/null +++ b/laboneq/dsl/experiment/experiment_context.py @@ -0,0 +1,61 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +from functools import wraps + +from laboneq.dsl.experiment.context import ( + Context, + iter_contexts, + pop_context, + push_context, +) +from laboneq.dsl.experiment.experiment import Experiment + + +class ExperimentContext(Context): + def __init__(self, *, uid=None, signals=None): + self.kwargs = {} + if uid is not None: + self.kwargs["uid"] = uid + if signals is not None: + self.kwargs["signals"] = signals + + self.calibration = None + self.experiment = None + + def __enter__(self): + """Use as a context manager to define experiment context""" + + push_context(self) + self.experiment = Experiment(**self.kwargs) + return self.experiment + + def __exit__(self, exc_type, exc_val, exc_tb): + assert pop_context() is self + if self.calibration is not None and exc_val is None: + self.experiment.set_calibration(self.calibration) + + def __call__(self, f): + """Use as a decorator for a function defining the context""" + + if "uid" not in self.kwargs: + self.kwargs["uid"] = f.__name__ + + @wraps(f) + def wrapper(*inner_args, **inner_kwargs): + with self: + f(*inner_args, **inner_kwargs) + return self.experiment + + return wrapper + + def add(self, section): + self.experiment.sections.append(section) + + +def current_experiment_context() -> ExperimentContext | None: + for c in iter_contexts(): + if isinstance(c, ExperimentContext): + return c diff --git a/laboneq/dsl/experiment/pulse_library.py b/laboneq/dsl/experiment/pulse_library.py index 665f4b1..902e524 100644 --- a/laboneq/dsl/experiment/pulse_library.py +++ 
b/laboneq/dsl/experiment/pulse_library.py @@ -208,7 +208,7 @@ def sawtooth(x, **_): @register_pulse_functional -def drag(x, sigma=1 / 3, beta=1.0, zero_boundaries=False, **_): +def drag(x, sigma=1 / 3, beta=0.2, zero_boundaries=False, **_): """Create a DRAG pulse Args: diff --git a/laboneq/dsl/experiment/section.py b/laboneq/dsl/experiment/section.py index e49bfa2..ef3d4f3 100644 --- a/laboneq/dsl/experiment/section.py +++ b/laboneq/dsl/experiment/section.py @@ -56,11 +56,13 @@ class Section: length: Optional[float] = field(default=None) #: Play after the section with the given ID. - play_after: Optional[Union[str, List[str]]] = field(default=None) + play_after: Optional[Union[str, Section, List[Union[str, Section]]]] = field( + default=None + ) #: List of children. Each child may be another section or an operation. children: List[Union[Section, dsl.experiment.operation.Operation]] = field( - default_factory=list, compare=False + default_factory=list ) #: Optional trigger pulses to play during this section. See :meth:`~.Experiment.section`. @@ -354,10 +356,13 @@ class Match(Section): """Execute one of the child branches depending on feedback result.""" #: Handle from which to obtain results - handle: str = "" + handle: Optional[str] = None + + #: User register on which to match + user_register: Optional[int] = None - #: Whether to go via the PQSC (False) or SHFQC (True) - local: bool = False + #: Whether to fetch the codeword via the PQSC (False), SHFQC-internal bus (True) or automatic (None) + local: bool | None = None def add(self, case: Case): """Add a branch to which to switch. 
diff --git a/laboneq/dsl/experiment/section_context.py b/laboneq/dsl/experiment/section_context.py new file mode 100644 index 0000000..5e2fe16 --- /dev/null +++ b/laboneq/dsl/experiment/section_context.py @@ -0,0 +1,238 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +from functools import wraps + +from laboneq.core.exceptions import LabOneQException +from laboneq.core.types.enums import ( + AcquisitionType, + AveragingMode, + ExecutionType, + RepetitionMode, +) +from laboneq.dsl.experiment.context import ( + Context, + current_context, + peek_context, + pop_context, + push_context, +) +from laboneq.dsl.experiment.section import ( + AcquireLoopNt, + AcquireLoopRt, + Case, + Match, + Section, + Sweep, +) + + +class SectionContextBase(Context): + section_class = ... + + def __init__(self): + self.section = None + self.kwargs = {} + self._auto_add = True + + def __enter__(self): + self.section = self.section_class(**self.kwargs) + parent = current_context() + if self.section.execution_type is None: + if parent is not None: + if isinstance(parent, SectionContextBase): + self.section.execution_type = parent.section.execution_type + elif self.section.execution_type == ExecutionType.NEAR_TIME: + if parent is not None and isinstance(parent, SectionContextBase): + if parent.section.execution_type == ExecutionType.REAL_TIME: + raise LabOneQException( + "Cannot nest near-time section inside real-time context" + ) + if self.section.execution_type is None: + self.section.execution_type = ExecutionType.NEAR_TIME + push_context(self) + return self.section + + def __exit__(self, exc_type, exc_val, exc_tb): + assert pop_context() is self + if exc_val is None and self._auto_add: + # auto-add section to parent + parent = current_context() + if parent is not None: + parent.add(self.section) + + def __call__(self, f): + raise NotImplementedError + + def add(self, section): + self.section.add(section) + + +class 
SectionSectionContext(SectionContextBase): + section_class = Section + + def __init__( + self, + length=None, + alignment=None, + uid=None, + on_system_grid=None, + play_after: str | list[str] | None = None, + trigger: dict[str, dict[str, int]] | None = None, + execution_type=None, + ): + super().__init__() + if uid is not None: + self.kwargs["uid"] = uid + if length is not None: + self.kwargs["length"] = length + if alignment is not None: + self.kwargs["alignment"] = alignment + if play_after is not None: + self.kwargs["play_after"] = play_after + if trigger is not None: + self.kwargs["trigger"] = trigger + if on_system_grid is not None: + self.kwargs["on_system_grid"] = on_system_grid + if execution_type is not None: + self.kwargs["execution_type"] = execution_type + + def __call__(self, f): + """Use as a decorator for a function defining the context""" + + if "uid" not in self.kwargs: + self.kwargs["uid"] = f.__name__ + + @wraps(f) + def wrapper(*inner_args, section_auto_add=True, **inner_kwargs): + self._auto_add = bool(section_auto_add) + with self: + f(*inner_args, **inner_kwargs) + return self.section + + return wrapper + + def add(self, section): + self.section.add(section) + + +class SweepSectionContext(SectionContextBase): + section_class = Sweep + + def __init__( + self, + parameters, + execution_type=None, + uid=None, + alignment=None, + reset_oscillator_phase=False, + chunk_count=1, + ): + super().__init__() + self.kwargs = {"parameters": parameters} + if uid is not None: + self.kwargs["uid"] = uid + if execution_type is not None: + self.kwargs["execution_type"] = execution_type + + if alignment is not None: + self.kwargs["alignment"] = alignment + + if reset_oscillator_phase is not None: + self.kwargs["reset_oscillator_phase"] = reset_oscillator_phase + + self.kwargs["chunk_count"] = chunk_count + + def __enter__(self): + super().__enter__() + if len(self.section.parameters) == 1: + return self.section.parameters[0] + return 
tuple(self.section.parameters) + + +class AcquireLoopNtSectionContext(SectionContextBase): + section_class = AcquireLoopNt + + def __init__(self, count, averaging_mode=AveragingMode.CYCLIC, uid=None): + super().__init__() + self.kwargs = dict( + count=count, + averaging_mode=averaging_mode, + ) + if uid is not None: + self.kwargs["uid"] = uid + + +class AcquireLoopRtSectionContext(SectionContextBase): + section_class = AcquireLoopRt + + def __init__( + self, + count=None, + averaging_mode=AveragingMode.CYCLIC, + repetition_mode=RepetitionMode.FASTEST, + repetition_time=None, + acquisition_type=AcquisitionType.INTEGRATION, + reset_oscillator_phase=False, + uid=None, + ): + super().__init__() + self.kwargs = dict( + count=count, + averaging_mode=averaging_mode, + repetition_mode=repetition_mode, + repetition_time=repetition_time, + acquisition_type=acquisition_type, + reset_oscillator_phase=reset_oscillator_phase, + ) + if uid is not None: + self.kwargs["uid"] = uid + + +class MatchSectionContext(SectionContextBase): + section_class = Match + + def __init__( + self, + handle: str | None = None, + user_register: int | None = None, + uid=None, + play_after=None, + ): + super().__init__() + if uid is not None: + self.kwargs["uid"] = uid + if play_after is not None: + self.kwargs["play_after"] = play_after + if handle is not None: + self.kwargs["handle"] = handle + if user_register is not None: + self.kwargs["user_register"] = user_register + + +class CaseSectionContext(SectionContextBase): + section_class = Case + + def __init__( + self, + uid, + state, + ): + super().__init__() + self.kwargs["state"] = state + if uid is not None: + self.kwargs["uid"] = uid + + def __enter__(self): + if not isinstance(peek_context(), MatchSectionContext): + raise LabOneQException("Case section must be inside a Match section") + return super().__enter__() + + +def active_section(): + s = peek_context() + if s is None or not isinstance(s, SectionContextBase): + raise 
LabOneQException("Must be in a section context") + return s.section diff --git a/laboneq/dsl/laboneq_facade.py b/laboneq/dsl/laboneq_facade.py index cef74a3..d646a8c 100644 --- a/laboneq/dsl/laboneq_facade.py +++ b/laboneq/dsl/laboneq_facade.py @@ -11,7 +11,9 @@ from laboneq import controller as ctrl from laboneq.compiler.workflow.compiler import Compiler from laboneq.core.types import CompiledExperiment -from laboneq.dsl.new_arch_support import convert_dsl_to_target_setup +from laboneq.implementation.legacy_adapters.converters_target_setup import ( + convert_dsl_to_target_setup, +) if TYPE_CHECKING: from laboneq.dsl.experiment.pulse import Pulse diff --git a/laboneq/dsl/parameter.py b/laboneq/dsl/parameter.py index 96fd78d..af72aff 100644 --- a/laboneq/dsl/parameter.py +++ b/laboneq/dsl/parameter.py @@ -40,9 +40,88 @@ class Parameter(ABC): uid: str = field(default_factory=parameter_id_generator) +class _ParameterArithmeticMixin: + values: ArrayLike + + def __add__(self, other): + new_param = SweepParameter(values=self.values + other) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __radd__(self, other): + new_param = SweepParameter(values=other + self.values) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __sub__(self, other): + new_param = SweepParameter(values=self.values - other) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __rsub__(self, other): + new_param = SweepParameter(values=other - self.values) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __mul__(self, other): + new_param = SweepParameter(values=self.values * other) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __rmul__(self, 
other): + new_param = SweepParameter(values=other * self.values) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __truediv__(self, other): + new_param = SweepParameter(values=self.values / other) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __rtruediv__(self, other): + new_param = SweepParameter(values=other / self.values) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __pow__(self, other): + new_param = SweepParameter(values=self.values**other) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __rpow__(self, other): + new_param = SweepParameter(values=other**self.values) + new_param.driven_by = [self] + if hasattr(other, "uid"): + new_param.driven_by.append(other) + return new_param + + def __neg__(self): + new_param = SweepParameter(values=-self.values) + new_param.driven_by = [self] + return new_param + + @classformatter @dataclass(init=True, repr=True, order=True) -class SweepParameter(Parameter): +class SweepParameter(_ParameterArithmeticMixin, Parameter): """An arbitrary sweep parameter.""" #: An arbitrary numpy array whose values are used as the sweep parameter. @@ -53,11 +132,15 @@ class SweepParameter(Parameter): #: If this argument is not defined, the uid of the object will be used instead. 
axis_name: str = field(default=None) + driven_by: list[SweepParameter] | None = field(default=None) + def __eq__(self, other): if self is other: return True - return self.axis_name == other.axis_name and _compare_nested( - self.values, other.values + return ( + self.axis_name == other.axis_name + and _compare_nested(self.values, other.values) + and self.driven_by == getattr(other, "driven_by", None) ) def __len__(self) -> int: @@ -66,7 +149,7 @@ def __len__(self) -> int: @classformatter @dataclass(init=True, repr=True, order=True) -class LinearSweepParameter(Parameter): +class LinearSweepParameter(_ParameterArithmeticMixin, Parameter): """A linear sweep parameter""" #: The starting value of the parameter sweep. diff --git a/laboneq/dsl/quantum/__init__.py b/laboneq/dsl/quantum/__init__.py index 0e9fd4f..a650d0f 100644 --- a/laboneq/dsl/quantum/__init__.py +++ b/laboneq/dsl/quantum/__init__.py @@ -1,5 +1,7 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 -from laboneq.dsl.quantum.quantum_operations import QuantumOperation -from laboneq.dsl.quantum.qubits import QuantumElement, Qubit, QubitParameters +from laboneq.dsl.quantum.quantum_element import QuantumElement, QuantumElementSignalMap +from laboneq.dsl.quantum.quantum_operation import QuantumOperation +from laboneq.dsl.quantum.qubit import Qubit, QubitParameters +from laboneq.dsl.quantum.transmon import Transmon, TransmonParameters diff --git a/laboneq/dsl/quantum/qubits.py b/laboneq/dsl/quantum/quantum_element.py similarity index 52% rename from laboneq/dsl/quantum/qubits.py rename to laboneq/dsl/quantum/quantum_element.py index ceee87d..c2ad017 100644 --- a/laboneq/dsl/quantum/qubits.py +++ b/laboneq/dsl/quantum/quantum_element.py @@ -5,12 +5,13 @@ import uuid from abc import ABC, abstractmethod from collections.abc import MutableMapping -from dataclasses import dataclass, field +from dataclasses import dataclass from enum import Enum from typing import Any, Callable, Dict, List, 
Optional, Tuple, Union from laboneq.core.exceptions import LabOneQException -from laboneq.dsl.calibration import Calibration, Oscillator, SignalCalibration +from laboneq.dsl.calibration import Calibration +from laboneq.dsl.device import LogicalSignalGroup from laboneq.dsl.device.io_units import LogicalSignal from laboneq.dsl.dsl_dataclass_decorator import classformatter from laboneq.dsl.experiment import ExperimentSignal @@ -19,6 +20,7 @@ class SignalType(Enum): DRIVE = "drive" + DRIVE_EF = "drive_ef" MEASURE = "measure" ACQUIRE = "acquire" FLUX = "flux" @@ -71,7 +73,7 @@ def __repr__(self): @classformatter @dataclass(init=False, repr=True) class QuantumElement(ABC): - """An abstract base class for quantum elements.""" + """An abstract base class for quantum elements like Qubits or tunable couplers etc.""" uid: str signals: Dict[str, str] @@ -93,12 +95,16 @@ def __init__( """ self.uid = uuid.uuid4().hex if uid is None else uid if signals is None: - signals = QuantumElementSignalMap({}) - if isinstance(signals, dict): + self.signals = QuantumElementSignalMap( + {}, key_validator=self._validate_signal_type + ) + elif isinstance(signals, dict): sigs = { k: self._resolve_to_logical_signal_uid(v) for k, v in signals.items() } - self.signals = QuantumElementSignalMap(sigs) + self.signals = QuantumElementSignalMap( + sigs, key_validator=self._validate_signal_type + ) else: self.signals = signals self._parameters = {} if parameters is None else parameters @@ -110,9 +116,47 @@ def __hash__(self): def _resolve_to_logical_signal_uid(signal: Union[str, LogicalSignal]) -> str: return signal.path if isinstance(signal, LogicalSignal) else signal + @staticmethod + def _validate_signal_type(name: str) -> str: + try: + SignalType(name) + return name + except ValueError: + raise LabOneQException( + f"Signal {name} is not one of {[enum.value for enum in SignalType]}" + ) + + # @classmethod + def _from_logical_signal_group( + self, + uid: str, + lsg: LogicalSignalGroup, + parameters: 
Optional[Dict[str, Any]] = None, + signal_type_map: Dict[SignalType, List[str]] = None, + ) -> "QuantumElement": + """Quantum Element from logical signal group. + + Args: + uid: A unique identifier for the Qubit. + lsg: Logical signal group. + Accepted names for logical signals depend on the qubit class used + parameters: Parameters associated with the qubit. + signal_types: a mapping between accepted logical signal names and SignalTypes + """ + signal_map = {} + for name, signal in lsg.logical_signals.items(): + signal_value = name + for signal_type, id_list in signal_type_map.items(): + if name in id_list: + signal_value = signal_type.value + signal_map[signal_value] = self._resolve_to_logical_signal_uid(signal) + return self( + uid=uid, signals=QuantumElementSignalMap(signal_map), parameters=parameters + ) + @property def parameters(self): - """Parameters of the element.""" + """Parameters of the quantum element.""" return self._parameters @classmethod @@ -168,177 +212,28 @@ def calibration(self) -> Calibration: """Calibration of the Quantum element.""" pass - def experiment_signals(self) -> List[ExperimentSignal]: - """Experiment signals of the quantum element.""" - sigs = [] - for k, v in self.calibration().items(): - sig = ExperimentSignal( - uid=k, - calibration=v, - map_to=k, - ) - sigs.append(sig) - return sigs - - -@classformatter -@dataclass -class QubitParameters: - #: Resonance frequency of the qubit. - res_frequency: float - #: Local oscillator frequency. - lo_frequency: float - #: Readout resonance frequency of the qubit. - readout_res_frequency: float - #: Readout local oscillator frequency. - readout_lo_frequency: float - #: Free form dictionary of user defined parameters. 
- user_defs: Dict = field(default_factory=dict) - - @property - def drive_frequency(self) -> float: - """Qubit drive frequency.""" - return self.res_frequency - self.lo_frequency - - @property - def readout_frequency(self) -> float: - """Readout baseband frequency.""" - return self.readout_res_frequency - self.readout_lo_frequency - - -@classformatter -@dataclass(init=False, repr=True, eq=False) -class Qubit(QuantumElement): - """A class for a generic Qubit.""" - - def __init__( - self, - uid: str = None, - signals: Dict[str, LogicalSignal] = None, - parameters: Optional[Union[QubitParameters, Dict[str, Any]]] = None, - ): - """ - Initializes a new Qubit. - - Args: - uid: A unique identifier for the Qubit. - signals: A mapping of logical signals associated with the qubit. - Qubit accepts the following keys in the mapping: 'drive', 'measure', 'acquire', 'flux' - - This is so that the Qubit parameters are assigned into the correct signal lines in - calibration. - parameters: Parameters associated with the qubit. - Required for generating calibration and experiment signals via `calibration()` and `experiment_signals()`. 
- """ - if isinstance(parameters, dict): - parameters = QubitParameters(**parameters) - if signals is None: - signals = QuantumElementSignalMap( - {}, key_validator=self._validate_signal_type - ) - if isinstance(signals, dict): - sigs = { - k: self._resolve_to_logical_signal_uid(v) for k, v in signals.items() - } - signals = QuantumElementSignalMap( - sigs, key_validator=self._validate_signal_type - ) - super().__init__(uid, signals, parameters) - - @staticmethod - def _validate_signal_type(name: str) -> str: - try: - SignalType(name) - return name - except ValueError: - raise LabOneQException( - f"Signal {name} is not one of {[enum.value for enum in SignalType]}" - ) - - @classmethod - def from_logical_signal_group( - cls, - uid: str, - lsg, - parameters: Optional[Union[QubitParameters, Dict[str, Any]]] = None, - ) -> "Qubit": - """Qubit from logical signal group. - - Args: - uid: A unique identifier for the Qubit. - lsg: Logical signal group. - Qubit understands the following signal line names: - - - drive: 'drive', 'drive_line' - - measure: 'measure', 'measure_line' - - acquire: 'acquire', 'acquire_line' - - flux: 'flux', 'flux_line' - - This is so that the Qubit parameters are assigned into the correct signal lines in - calibration. - parameters: Parameters associated with the qubit. - """ - signal_map = {} - for name, sig in lsg.logical_signals.items(): - sig_type = name - if name in ["drive", "drive_line"]: - sig_type = SignalType.DRIVE.value - if name in ["measure", "measure_line"]: - sig_type = SignalType.MEASURE.value - if name in ["acquire", "acquire_line"]: - sig_type = SignalType.ACQUIRE.value - if name in ["flux", "flux_line"]: - sig_type = SignalType.FLUX.value - signal_map[sig_type] = cls._resolve_to_logical_signal_uid(sig) - return cls( - uid=uid, signals=QuantumElementSignalMap(signal_map), parameters=parameters - ) - - def calibration(self) -> Calibration: - """Generate calibration from the parameters and attached signal lines. 
- - `Qubit` requires `parameters` for it to be able to produce calibration objects. - - Returns: - Prefilled calibration object from Qubit parameters. - """ - calibs = {} - if "drive" in self.signals: - calibs[self.signals["drive"]] = SignalCalibration( - oscillator=Oscillator( - uid=f"{self.uid}_drive_osc", - frequency=self.parameters.drive_frequency, - ) - ) - if "measure" in self.signals: - calibs[self.signals["measure"]] = SignalCalibration( - oscillator=Oscillator( - uid=f"{self.uid}_measure_osc", - frequency=self.parameters.readout_frequency, - ) - ) - if "acquire" in self.signals: - calibs[self.signals["acquire"]] = SignalCalibration( - oscillator=Oscillator( - uid=f"{self.uid}_acquire_osc", - frequency=self.parameters.readout_frequency, - ) - ) - if "flux" in self.signals: - calibs[self.signals["flux"]] = SignalCalibration() - return Calibration(calibs) - def experiment_signals( - self, with_types=False + self, + with_types=False, + with_calibration=False, ) -> Union[List[ExperimentSignal], List[Tuple[SignalType, ExperimentSignal]]]: """Experiment signals of the quantum element. - `Qubit` requires `parameters` for it to be able to produce experiment signals. - Args: - with_types: Return a list of tuples which consist of an mapped logical signal type and an experiment signal. + with_types: When true, return a list of tuples which consist of a mapped logical signal + type and an experiment signal. Otherwise, just return the experiment signals. + with_calibration: Apply the qubit's calibration to the ExperimentSignal. 
""" - exp_signals = super().experiment_signals() + + if not with_calibration: + exp_signals = [ + ExperimentSignal(uid=k, map_to=k) for k in self.signals.values() + ] + else: + exp_signals = [ + ExperimentSignal(uid=k, calibration=v, map_to=k) + for k, v in self.calibration().items() + ] if with_types: sigs = [] for exp_sig in exp_signals: diff --git a/laboneq/dsl/quantum/quantum_operations.py b/laboneq/dsl/quantum/quantum_operation.py similarity index 98% rename from laboneq/dsl/quantum/quantum_operations.py rename to laboneq/dsl/quantum/quantum_operation.py index 80c2f7c..9b28fcb 100644 --- a/laboneq/dsl/quantum/quantum_operations.py +++ b/laboneq/dsl/quantum/quantum_operation.py @@ -7,7 +7,7 @@ from typing import Dict, Optional, Tuple, Union from laboneq.dsl.experiment.section import Section -from laboneq.dsl.quantum.qubits import QuantumElement +from laboneq.dsl.quantum.qubit import QuantumElement from laboneq.dsl.serialization import Serializer QuantumElementTuple = Tuple[QuantumElement, ...] diff --git a/laboneq/dsl/quantum/qubit.py b/laboneq/dsl/quantum/qubit.py new file mode 100644 index 0000000..2a29354 --- /dev/null +++ b/laboneq/dsl/quantum/qubit.py @@ -0,0 +1,168 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from dataclasses import dataclass, field +from typing import Any, Dict, Optional, Union + +from laboneq.dsl.calibration import Calibration, Oscillator, SignalCalibration +from laboneq.dsl.device import LogicalSignalGroup +from laboneq.dsl.device.io_units import LogicalSignal +from laboneq.dsl.dsl_dataclass_decorator import classformatter +from laboneq.dsl.enums import ModulationType +from laboneq.dsl.quantum.quantum_element import QuantumElement, SignalType + + +@classformatter +@dataclass +class QubitParameters: + #: Resonance frequency of the qubit. + resonance_frequency: float + #: Local oscillator frequency for the qubit drive line. 
+ drive_lo_frequency: float + #: Resonance frequency of the readout resonators used to read the state of the qubit. + readout_resonator_frequency: float + #: Local oscillator frequency for the readout lines. + readout_lo_frequency: float + #: integration delay between readout pulse and data acquisition, defaults to 20 ns. + readout_integration_delay: Optional[float] = 20e-9 + #: drive power setting, defaults to 10 dBm. + drive_range: Optional[float] = 10 + #: readout output power setting, defaults to 5 dBm. + readout_range_out: Optional[float] = 5 + #: readout input power setting, defaults to 10 dBm. + readout_range_in: Optional[float] = 10 + #: offset voltage for flux control line - defaults to 0. + flux_offset_voltage: Optional[float] = 0 + #: Free form dictionary of user defined parameters. + user_defined: Optional[Dict] = field(default_factory=dict) + + @property + def drive_frequency(self) -> float: + """Qubit drive frequency.""" + return self.resonance_frequency - self.drive_lo_frequency + + @property + def readout_frequency(self) -> float: + """Readout baseband frequency.""" + return self.readout_resonator_frequency - self.readout_lo_frequency + + +@classformatter +@dataclass(init=False, repr=True, eq=False) +class Qubit(QuantumElement): + """A class for a generic two-level Qubit.""" + + def __init__( + self, + uid: str = None, + signals: Dict[str, LogicalSignal] = None, + parameters: Optional[Union[QubitParameters, Dict[str, Any]]] = None, + ): + """ + Initializes a new Qubit. + + Args: + uid: A unique identifier for the Qubit. + signals: A mapping of logical signals associated with the qubit. + Qubit accepts the following keys in the mapping: 'drive', 'measure', 'acquire', 'flux' + + This is so that the Qubit parameters are assigned into the correct signal lines in + calibration. + parameters: Parameters associated with the qubit. + Required for generating calibration and experiment signals via `calibration()` and `experiment_signals()`. 
+ """ + if isinstance(parameters, dict): + parameters = QubitParameters(**parameters) + super().__init__(uid=uid, signals=signals, parameters=parameters) + + @classmethod + def from_logical_signal_group( + cls, + uid: str, + lsg: LogicalSignalGroup, + parameters: Optional[Union[QubitParameters, Dict[str, Any]]] = None, + ) -> "Qubit": + """Qubit from logical signal group. + + Args: + uid: A unique identifier for the Qubit. + lsg: Logical signal group. + Qubit understands the following signal line names: + + - drive: 'drive', 'drive_line' + - measure: 'measure', 'measure_line' + - acquire: 'acquire', 'acquire_line' + - flux: 'flux', 'flux_line' + + This is so that the Qubit parameters are assigned into the correct signal lines in + calibration. + parameters: Parameters associated with the qubit. + """ + signal_type_map = { + SignalType.DRIVE: ["drive", "drive_line"], + SignalType.MEASURE: ["measure", "measure_line"], + SignalType.ACQUIRE: ["acquire", "acquire_line"], + SignalType.FLUX: ["flux", "flux_line"], + } + return cls._from_logical_signal_group( + cls, + uid=uid, + lsg=lsg, + parameters=parameters, + signal_type_map=signal_type_map, + ) + + def calibration(self) -> Calibration: + """Generate calibration from the qubits parameters and signal lines. + + `Qubit` requires `parameters` for it to be able to produce a calibration object. + + Returns: + Prefilled calibration object from Qubit parameters. 
+ """ + calib = {} + + drive_lo = Oscillator( + uid=f"{self.uid}_drive_local_osc", + frequency=self.parameters.drive_lo_frequency, + ) + readout_lo = Oscillator( + uid=f"{self.uid}_readout_local_osc", + frequency=self.parameters.readout_lo_frequency, + ) + + if "drive" in self.signals: + calib[self.signals["drive"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_drive_osc", + frequency=self.parameters.drive_frequency, + modulation_type=ModulationType.HARDWARE, + ), + local_oscillator=drive_lo, + range=self.parameters.drive_range, + ) + if "measure" in self.signals: + calib[self.signals["measure"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_measure_osc", + frequency=self.parameters.readout_frequency, + modulation_type=ModulationType.SOFTWARE, + ), + local_oscillator=readout_lo, + range=self.parameters.readout_range_out, + ) + if "acquire" in self.signals: + calib[self.signals["acquire"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_acquire_osc", + frequency=self.parameters.readout_frequency, + modulation_type=ModulationType.SOFTWARE, + ), + local_oscillator=readout_lo, + range=self.parameters.readout_range_out, + ) + if "flux" in self.signals: + calib[self.signals["flux"]] = SignalCalibration( + voltage_offset=self.parameters.flux_offset_voltage, + ) + return Calibration(calib) diff --git a/laboneq/dsl/quantum/transmon.py b/laboneq/dsl/quantum/transmon.py new file mode 100644 index 0000000..72f6059 --- /dev/null +++ b/laboneq/dsl/quantum/transmon.py @@ -0,0 +1,195 @@ +# Copyright 2022 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +from dataclasses import dataclass, field +from typing import Any, Dict, Optional, Union + +from laboneq.dsl.calibration import Calibration, Oscillator, SignalCalibration +from laboneq.dsl.device import LogicalSignalGroup +from laboneq.dsl.device.io_units import LogicalSignal +from laboneq.dsl.dsl_dataclass_decorator import classformatter +from laboneq.dsl.enums 
import ModulationType +from laboneq.dsl.quantum.quantum_element import QuantumElement, SignalType + + +@classformatter +@dataclass +class TransmonParameters: + #: Resonance frequency of the qubits g-e transition. + resonance_frequency_ge: float + #: Resonance frequency of the qubits e-f transition. + resonance_frequency_ef: float + #: Local oscillator frequency for the drive signals. + drive_lo_frequency: float + #: Readout resonantor frequency of the qubit. + readout_resonator_frequency: float + #: local oscillator frequency for the readout lines. + readout_lo_frequency: float + #: integration delay between readout pulse and data acquisition, defaults to 20 ns. + readout_integration_delay: Optional[float] = 20e-9 + #: drive power setting, defaults to 10 dBm. + drive_range: Optional[float] = 10 + #: readout output power setting, defaults to 5 dBm. + readout_range_out: Optional[float] = 5 + #: readout input power setting, defaults to 10 dBm. + readout_range_in: Optional[float] = 10 + #: offset voltage for flux control line - defaults to 0. + flux_offset_voltage: Optional[float] = 0 + #: Free form dictionary of user defined parameters. 
+ user_defined: Optional[Dict] = field(default_factory=dict) + + @property + def drive_frequency_ge(self) -> float: + """Qubit drive frequency.""" + return self.resonance_frequency_ge - self.drive_lo_frequency + + @property + def drive_frequency_ef(self) -> float: + """Qubit drive frequency.""" + return self.resonance_frequency_ef - self.drive_lo_frequency + + @property + def readout_frequency(self) -> float: + """Readout baseband frequency.""" + return self.readout_resonator_frequency - self.readout_lo_frequency + + +@classformatter +@dataclass(init=False, repr=True, eq=False) +class Transmon(QuantumElement): + """A class for a superconducting, flux-tuneable Transmon Qubit.""" + + def __init__( + self, + uid: str = None, + signals: Dict[str, LogicalSignal] = None, + parameters: Optional[Union[TransmonParameters, Dict[str, Any]]] = None, + ): + """ + Initializes a new Transmon Qubit. + + Args: + uid: A unique identifier for the Qubit. + signals: A mapping of logical signals associated with the qubit. + Qubit accepts the following keys in the mapping: 'drive', 'measure', 'acquire', 'flux' + + This is so that the Qubit parameters are assigned into the correct signal lines in + calibration. + parameters: Parameters associated with the qubit. + Required for generating calibration and experiment signals via `calibration()` and `experiment_signals()`. + """ + if isinstance(parameters, dict): + parameters = TransmonParameters(**parameters) + super().__init__(uid=uid, signals=signals, parameters=parameters) + + @classmethod + def from_logical_signal_group( + cls, + uid: str, + lsg: LogicalSignalGroup, + parameters: Optional[Union[TransmonParameters, Dict[str, Any]]] = None, + ) -> "Transmon": + """Transmon Qubit from logical signal group. + + Args: + uid: A unique identifier for the Qubit. + lsg: Logical signal group. 
+ Transmon Qubit understands the following signal line names: + + - drive: 'drive', 'drive_line' + - drive_ef: 'drive_ef', 'drive_line_ef' + - measure: 'measure', 'measure_line' + - acquire: 'acquire', 'acquire_line' + - flux: 'flux', 'flux_line' + + This is so that the Qubit parameters are assigned into the correct signal lines in + calibration. + parameters: Parameters associated with the qubit. + """ + signal_type_map = { + SignalType.DRIVE: ["drive", "drive_line"], + SignalType.DRIVE_EF: ["drive_ef", "drive_line_ef"], + SignalType.MEASURE: ["measure", "measure_line"], + SignalType.ACQUIRE: ["acquire", "acquire_line"], + SignalType.FLUX: ["flux", "flux_line"], + } + return cls._from_logical_signal_group( + cls, + uid=uid, + lsg=lsg, + parameters=parameters, + signal_type_map=signal_type_map, + ) + + def calibration(self, set_local_oscillators=True) -> Calibration: + """Generate calibration from the parameters and attached signal lines. + + `Qubit` requires `parameters` for it to be able to produce calibration objects. + + Args: + set_local_oscillators: if True, adds local oscillator settings to the calibration. + + Returns: + Prefilled calibration object from Qubit parameters. 
+ """ + + if set_local_oscillators: + drive_lo = Oscillator( + uid=f"{self.uid}_drive_local_osc", + frequency=self.parameters.drive_lo_frequency, + ) + readout_lo = Oscillator( + uid=f"{self.uid}_readout_local_osc", + frequency=self.parameters.readout_lo_frequency, + ) + else: + drive_lo = None + readout_lo = None + + calib = {} + if "drive" in self.signals: + calib[self.signals["drive"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_drive_ge_osc", + frequency=self.parameters.drive_frequency_ge, + modulation_type=ModulationType.HARDWARE, + ), + local_oscillator=drive_lo, + range=self.parameters.drive_range, + ) + if "drive_ef" in self.signals: + calib[self.signals["drive_ef"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_drive_ef_osc", + frequency=self.parameters.drive_frequency_ef, + modulation_type=ModulationType.HARDWARE, + ), + local_oscillator=drive_lo, + range=self.parameters.drive_range, + ) + if "measure" in self.signals: + calib[self.signals["measure"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_measure_osc", + frequency=self.parameters.readout_frequency, + modulation_type=ModulationType.SOFTWARE, + ), + local_oscillator=readout_lo, + range=self.parameters.readout_range_out, + ) + if "acquire" in self.signals: + calib[self.signals["acquire"]] = SignalCalibration( + oscillator=Oscillator( + uid=f"{self.uid}_acquire_osc", + frequency=self.parameters.readout_frequency, + modulation_type=ModulationType.SOFTWARE, + ), + local_oscillator=readout_lo, + range=self.parameters.readout_range_in, + port_delay=self.parameters.readout_integration_delay, + ) + if "flux" in self.signals: + calib[self.signals["flux"]] = SignalCalibration( + voltage_offset=self.parameters.flux_offset_voltage, + ) + return Calibration(calib) diff --git a/laboneq/dsl/result/acquired_result.py b/laboneq/dsl/result/acquired_result.py index 2fbca0e..bd9584a 100644 --- a/laboneq/dsl/result/acquired_result.py +++ 
b/laboneq/dsl/result/acquired_result.py @@ -1,55 +1,5 @@ # Copyright 2022 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 -from __future__ import annotations - -from dataclasses import dataclass, field -from typing import List, Union - -import numpy as np -from numpy.typing import ArrayLike - -from laboneq.dsl.dsl_dataclass_decorator import classformatter - - -def _compare_nested(a, b): - if isinstance(a, list) or isinstance(a, np.ndarray): - if not (isinstance(b, list) or isinstance(b, np.ndarray)): - return False - if not len(a) == len(b): - return False - return all(map(lambda x: _compare_nested(x[0], x[1]), zip(a, b))) - return a == b - - -@classformatter -@dataclass(init=True, repr=True, order=True) -class AcquiredResult: - """ - This class represents the results acquired for an 'acquire' event. - - The acquired result is a triple consisting of actual data, axis name(s) - and one or more axes - """ - - #: A multidimensional numpy array, where each dimension corresponds to a sweep loop - #: nesting level, the outermost sweep being the first dimension. - data: ArrayLike = field(default=None) - - #: A list of axis names. Each element may be either a string or a list of strings. - axis_name: List[Union[str, List[str]]] = field(default=None) - - #: A list of axis grids. Each element may be either a 1D numpy array or a list of - #: such arrays. - axis: List[Union[ArrayLike, List[ArrayLike]]] = field(default=None) - - #: A list of axis indices that represent the last measured near-time point. Only - #: covers outer near-time dimensions. 
- last_nt_step: List[int] = field(default=None) - - def __eq__(self, other: AcquiredResult): - return ( - _compare_nested(self.data, other.data) - and self.axis_name == other.axis_name - and _compare_nested(self.axis, other.axis) - ) +# Backwards compatibility +from laboneq.data.experiment_results import AcquiredResult # noqa: F401 diff --git a/laboneq/dsl/serialization/serializer.py b/laboneq/dsl/serialization/serializer.py index 8e12a52..cdf5877 100644 --- a/laboneq/dsl/serialization/serializer.py +++ b/laboneq/dsl/serialization/serializer.py @@ -67,7 +67,6 @@ def to_json(serializable_object) -> str: if isinstance(serializable_object, dict): json_dump = orjson.dumps(serializable_object, option=options) else: - entity_classes, entity_mapper = Serializer._entity_config() json_struct = serialize_to_dict_with_ref( @@ -102,13 +101,16 @@ def _classes_by_short_name(): "laboneq.dsl.parameter", "laboneq.dsl.calibration", "laboneq.dsl.device", - "laboneq.dsl.quantum.qubits", - "laboneq.dsl.quantum.quantum_operations", + "laboneq.dsl.quantum.quantum_element", + "laboneq.dsl.quantum.quantum_operation", + "laboneq.dsl.quantum.qubit", + "laboneq.dsl.quantum.transmon", "laboneq.dsl.device.server", "laboneq.dsl.device.servers.data_server", "laboneq.core.types.enums", "laboneq.core.types.compiled_experiment", "laboneq.data.scheduled_experiment", + "laboneq.data.recipe", "laboneq.executor.executor", "laboneq.dsl.device.io_units.logical_signal", "laboneq.dsl.device.io_units.physical_channel", diff --git a/laboneq/dsl/session.py b/laboneq/dsl/session.py index b85f406..8668ae3 100644 --- a/laboneq/dsl/session.py +++ b/laboneq/dsl/session.py @@ -268,7 +268,7 @@ def compile( ) -> Optional[CompiledExperiment]: """Compiles the specified experiment and stores it in the compiled_experiment property. - Requires connected LabOneQ session (`session.connect()`) either with or without emulation mode. 
+ Requires connected LabOne Q session (`session.connect()`) either with or without emulation mode. Args: experiment: Experiment instance that should be compiled. @@ -305,7 +305,7 @@ def run( ) -> Optional[Results]: """Executes the compiled experiment. - Requires connected LabOneQ session (`session.connect()`) either with or without emulation mode. + Requires connected LabOne Q session (`session.connect()`) either with or without emulation mode. If no experiment is specified, the last compiled experiment is run. If an experiment is specified, the provided experiment is assigned to the diff --git a/laboneq/implementation/compilation_service/compilation_service.py b/laboneq/implementation/compilation_service/compilation_service.py deleted file mode 100644 index 1458705..0000000 --- a/laboneq/implementation/compilation_service/compilation_service.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2023 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - -import logging - -from laboneq.data.compilation_job import CompilationJob -from laboneq.data.scheduled_experiment import ScheduledExperiment -from laboneq.interfaces.compilation_service.compilation_service_api import ( - CompilationServiceAPI, -) - -_logger = logging.getLogger(__name__) - - -class CompilationService(CompilationServiceAPI): - """ - This the core implementation of the compilation service. - """ - - def __init__(self): - pass - - def submit_compilation_job(self, job: CompilationJob): - """ - Submit a compilation job. - """ - - return None - - def compilation_job_status(self, job_id: str): - """ - Get the status of a compilation job. - """ - return None - - def compilation_job_result(self, job_id: str) -> ScheduledExperiment: - """ - Get the result of a compilation job. Blocks until the result is available. 
- """ - - return ScheduledExperiment( - recipe={ - "experiment": {"initializations": [], "realtime_execution_init": []} - } - ) diff --git a/laboneq/implementation/compilation_service/compilation_service_legacy.py b/laboneq/implementation/compilation_service/compilation_service_legacy.py index a01dcd3..632f4cb 100644 --- a/laboneq/implementation/compilation_service/compilation_service_legacy.py +++ b/laboneq/implementation/compilation_service/compilation_service_legacy.py @@ -100,7 +100,7 @@ def convert_to_experiment_json(job: CompilationJob): }, } ) - for osc in signal.oscillators: + if osc := signal.oscillator is not None: oscillators_in_job[osc.uid] = osc if osc.is_hardware: device_oscillators.setdefault(signal.device.uid, []).append(osc) @@ -135,12 +135,14 @@ def convert_to_experiment_json(job: CompilationJob): { "id": s.uid, "signal_type": signal_type_mapping[s.type], - "oscillators_list": [{"$ref": o.uid} for o in s.oscillators], + "oscillators_list": [{"$ref": s.oscillator.uid}] + if s.oscillator is not None + else [], } for s in job.experiment_info.signals ] for s in retval["signals"]: - if s["oscillators_list"] == []: + if not len(s["oscillators_list"]): del s["oscillators_list"] else: s["modulation"] = True diff --git a/laboneq/implementation/data_storage/__init__.py b/laboneq/implementation/data_storage/__init__.py index 17c557a..62c18fd 100644 --- a/laboneq/implementation/data_storage/__init__.py +++ b/laboneq/implementation/data_storage/__init__.py @@ -1,2 +1,4 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 + +from laboneq.implementation.data_storage.laboneq_database import DataStore diff --git a/laboneq/implementation/data_storage/l1q_database_wrapper.py b/laboneq/implementation/data_storage/laboneq_database.py similarity index 58% rename from laboneq/implementation/data_storage/l1q_database_wrapper.py rename to laboneq/implementation/data_storage/laboneq_database.py index ab3a0ad..45feff9 100644 --- 
a/laboneq/implementation/data_storage/l1q_database_wrapper.py +++ b/laboneq/implementation/data_storage/laboneq_database.py @@ -1,15 +1,16 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 -from laboneq.implementation.data_storage_service.data_storage_service_sqlite_dict import ( +from laboneq.implementation.data_storage.services.sqlite_dict import ( DataStorageServiceSqliteDict, ) from laboneq.interfaces.data_storage.data_storage_api import DataStorageAPI -class L1QDatabase(DataStorageAPI): - """This is proxy object to access the data DataStorageServiceSqliteDict. It is used to give the user simple access to the data storage api. - This class is included in the simple.py so that the user can access it using 'from laboneq.simple import *' +class DataStore(DataStorageAPI): + """Proxy object to access the data base. + + Defaults to `DataStorageServiceSqliteDict`. """ def __init__(self, file_path=None): diff --git a/laboneq/implementation/data_storage_service/__init__.py b/laboneq/implementation/data_storage/services/__init__.py similarity index 100% rename from laboneq/implementation/data_storage_service/__init__.py rename to laboneq/implementation/data_storage/services/__init__.py diff --git a/laboneq/implementation/data_storage_service/data_storage_service_sqlite_dict.py b/laboneq/implementation/data_storage/services/sqlite_dict.py similarity index 99% rename from laboneq/implementation/data_storage_service/data_storage_service_sqlite_dict.py rename to laboneq/implementation/data_storage/services/sqlite_dict.py index 3c3e594..f6fa20b 100644 --- a/laboneq/implementation/data_storage_service/data_storage_service_sqlite_dict.py +++ b/laboneq/implementation/data_storage/services/sqlite_dict.py @@ -58,7 +58,7 @@ def store( metadata: Optional[Dict[str, Any]] = None, ) -> str: """ - Store data in the database. Only data that can be serialized with the L1Q serializer can be stored. + Store data in the database. 
Only data that can be serialized with the LabOne Q serializer can be stored. Args: key (str): The key to store the data under. diff --git a/laboneq/implementation/experiment_workflow/device_setup_generator.py b/laboneq/implementation/experiment_workflow/device_setup_generator.py index a1fa3a4..1be8c99 100644 --- a/laboneq/implementation/experiment_workflow/device_setup_generator.py +++ b/laboneq/implementation/experiment_workflow/device_setup_generator.py @@ -1,35 +1,33 @@ # Copyright 2022 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + import itertools import logging import warnings +from collections import UserDict from typing import Dict, List, Optional, Tuple, Union from laboneq.core.exceptions.laboneq_exception import LabOneQException from laboneq.data.setup_description import ( - Connection, + ChannelMapEntry, DeviceType, Instrument, - IOSignalType, LogicalSignal, LogicalSignalGroup, PhysicalChannel, PhysicalChannelType, - Port, Server, Setup, SetupInternalConnection, ) +from laboneq.implementation.legacy_adapters import device_setup_converter as converter _logger = logging.getLogger(__name__) PATH_SEPARATOR = "/" -LogicalSignalGroups_Path = "logical_signal_groups" -LogicalSignalGroups_Path_Abs = PATH_SEPARATOR + LogicalSignalGroups_Path - - # Terminal Symbols T_HDAWG_DEVICE = "HDAWG" T_UHFQA_DEVICE = "UHFQA" @@ -57,13 +55,6 @@ T_PORT = "port" T_PORTS = "ports" -SIGNAL_TYPE_DIRECTORY = { - T_IQ_SIGNAL: IOSignalType.IQ, - T_ACQUIRE_SIGNAL: IOSignalType.IQ, - T_RF_SIGNAL: IOSignalType.RF, - T_TO: IOSignalType.DIO, -} - # Models 'instruments' (former 'instrument_list') part of the descriptor: # instruments: @@ -103,15 +94,6 @@ DataServersType = Dict[str, Dict[str, Union[str, List[str]]]] -def _iterate_over_descriptors_of_type(instruments: InstrumentsType, device_type: str): - for descriptor in instruments.get(device_type, []): - yield descriptor[T_UID], descriptor[T_ADDRESS], descriptor.get(T_INTERFACE) - - -def 
_skip_nones(**kwargs): - return {k: v for k, v in kwargs.items() if v is not None} - - def _port_decoder(port_desc, additional_switch_keys=None) -> Tuple[str, str, List[str]]: if additional_switch_keys is None: additional_switch_keys = [] @@ -160,7 +142,7 @@ def _port_decoder(port_desc, additional_switch_keys=None) -> Tuple[str, str, Lis ) if signal_type_keyword in signal_keys: - remote_path = PATH_SEPARATOR.join(["", "logical_signal_groups", remote_path]) + remote_path = PATH_SEPARATOR.join(["", remote_path]) if port_desc: raise LabOneQException(f"Unknown keyword found: {list(port_desc.keys())[0]}") @@ -168,16 +150,6 @@ def _port_decoder(port_desc, additional_switch_keys=None) -> Tuple[str, str, Lis return signal_type_keyword, remote_path, local_ports -def _path_to_signal(path): - if PATH_SEPARATOR in path: - split_path = path.split(PATH_SEPARATOR) - if split_path[1] == LogicalSignalGroups_Path: - return split_path[2], split_path[3] - else: - return split_path[0], split_path[1] - return None - - def _create_physical_channel( ports: List[str], signal_type_token: str, device_id, physical_signals ) -> Optional[PhysicalChannel]: @@ -195,27 +167,68 @@ def _create_physical_channel( for group in itertools.groupby([x for y in zip(*split_ports) for x in y]) ) ).lower() - + if not signal_name: + return if device_id not in physical_signals: physical_signals[device_id] = [] else: other_signal: PhysicalChannel = next( - (ps for ps in physical_signals[device_id] if ps.uid == signal_name), None + (ps for ps in physical_signals[device_id] if ps.name == signal_name), None ) if other_signal is not None: return other_signal - physical_channel = PhysicalChannel(uid=f"{signal_name}", type=channel_type) + physical_channel = PhysicalChannel(name=f"{signal_name}", type=channel_type) physical_signals[device_id].append(physical_channel) return physical_channel +class DescriptorLogicalSignals(UserDict): + def __init__(self, data: Dict) -> Dict[str, LogicalSignalGroup]: + 
super().__init__(data) + self.data = self._generate_logical_signal_groups(data) + + def _generate_logical_signal_groups( + self, data: Dict + ) -> Dict[str, LogicalSignalGroup]: + logical_signals_candidates = [] + + for conns in data.values(): + for conn in conns: + _, remote_path, _ = _port_decoder(conn) + if PATH_SEPARATOR in remote_path: + logical_signals_candidates.append( + { + "lsg_uid": remote_path.split(PATH_SEPARATOR)[1], + "signal_id": remote_path.split(PATH_SEPARATOR)[2], + } + ) + + logical_signal_groups = {} + for lsg_uid in set([ls["lsg_uid"] for ls in logical_signals_candidates]): + signals = [ + LogicalSignal( + name=ls["signal_id"], + group=ls["lsg_uid"], + ) + for ls in logical_signals_candidates + if ls["lsg_uid"] == lsg_uid + ] + + lsg = LogicalSignalGroup(lsg_uid, {ls.name: ls for ls in signals}) + logical_signal_groups[lsg.uid] = lsg + return logical_signal_groups + + def get_logical_signal(self, group: str, name: str) -> LogicalSignal: + return self.data[group].logical_signals[name] + + class DeviceSetupGenerator: @staticmethod def from_descriptor( yaml_text: str, server_host: str = None, - server_port: str = None, + server_port: int | str = None, setup_name: str = None, ): from yaml import load @@ -275,7 +288,7 @@ def from_dicts( connections: ConnectionsType = None, dataservers: DataServersType = None, server_host: str = None, - server_port: str = None, + server_port: int | str = None, setup_name: str = None, ): if instrument_list is not None: @@ -320,13 +333,12 @@ def from_dicts( "At least one server must be defined either in the descriptor or in the constructor." 
) - # Construct servers servers = [ ( Server( uid=server_uid, host=server_def["host"], - port=server_def.get("port", 8004), + port=int(server_def.get("port", 8004)), api_level=6, ), server_def.get("instruments", []), @@ -366,57 +378,21 @@ def server_finder(device_uid: str) -> str: out_instruments: List[Instrument] = [] for it, il in {**instrument_list, **instruments}.items(): for instrument_def in il: + legacy_ports = converter.legacy_instrument_ports(DeviceType[it]) instrument = Instrument( uid=instrument_def[T_UID], device_type=DeviceType[it], server=server_finder(instrument_def[T_UID]), address=instrument_def.get(T_ADDRESS, ""), + ports=[converter.convert_instrument_port(x) for x in legacy_ports], ) out_instruments.append(instrument) instruments_by_uid = {i.uid: i for i in out_instruments} - logical_signals_candidates = [] - logical_signal_groups = [] physical_signals = {} setup_internal_connections = [] - - for device_uid, conns in connections.items(): - instrument = instruments_by_uid[device_uid] - for conn in conns: - signal_type_keyword, remote_path, local_ports = _port_decoder(conn) - if PATH_SEPARATOR in remote_path: - logical_signals_candidates.append( - { - "lsg_uid": remote_path.split(PATH_SEPARATOR)[2], - "signal_id": remote_path.split(PATH_SEPARATOR)[3], - } - ) - - def ls_path_from_parts(lsg_uid, signal_id): - return f"{LogicalSignalGroups_Path_Abs}/{lsg_uid}/{signal_id}" - - ls_by_path = {} - logical_signal_group_uids = set( - [ls["lsg_uid"] for ls in logical_signals_candidates] - ) - for lsg_uid in logical_signal_group_uids: - signals = [ - LogicalSignal( - uid=f"{ls['signal_id']}", - name=ls["signal_id"], - path=ls_path_from_parts(lsg_uid, ls["signal_id"]), - ) - for ls in logical_signals_candidates - if ls["lsg_uid"] == lsg_uid - ] - ls_by_path = {**ls_by_path, **{ls.path: ls for ls in signals}} - - logical_signal_groups.append(LogicalSignalGroup(lsg_uid, signals)) - - for lsg in logical_signal_groups: - lsg.logical_signals = {ls.uid: ls for ls 
in lsg.logical_signals} - logical_signal_groups = {lsg.uid: lsg for lsg in logical_signal_groups} + logical_signal_groups = DescriptorLogicalSignals(connections) # Define connections for device_uid, conns in connections.items(): @@ -424,64 +400,77 @@ def ls_path_from_parts(lsg_uid, signal_id): instrument = instruments_by_uid[device_uid] for conn in conns: signal_type_keyword, remote_path, local_ports = _port_decoder(conn) + # TODO (MH): Device processors if signal_type_keyword == T_ACQUIRE_SIGNAL: if instrument.device_type == DeviceType.UHFQA: local_ports = ["QAS/0", "QAS/1"] logical_signal = None if PATH_SEPARATOR in remote_path: - logical_signal_id = { - "lsg_uid": remote_path.split(PATH_SEPARATOR)[2], - "signal_id": remote_path.split(PATH_SEPARATOR)[3], - } - logical_signal = ls_by_path[ls_path_from_parts(**logical_signal_id)] + lsg = remote_path.split(PATH_SEPARATOR)[1] + signal_id = remote_path.split(PATH_SEPARATOR)[2] + logical_signal = logical_signal_groups.get_logical_signal( + lsg, signal_id + ) physical_channel = _create_physical_channel( local_ports, signal_type_keyword, device_uid, physical_signals ) - if physical_channel is not None: - if physical_channel.uid in physical_channels_by_uid: + if physical_channel.name in physical_channels_by_uid: physical_channel = physical_channels_by_uid[ - physical_channel.uid + physical_channel.name ] else: physical_channels_by_uid[ - physical_channel.uid + physical_channel.name ] = physical_channel + if "ports" in conn: + for port in instrument.ports: + if port.path in conn["ports"]: + if not physical_channel.ports: + physical_channel.ports = [port] + if port not in physical_channel.ports: + physical_channel.ports.append(port) + else: + for l_port in local_ports: + for dev_port in instrument.ports: + if l_port == dev_port.path: + if not physical_channel.ports: + physical_channel.ports = [dev_port] + if port not in physical_channel.ports: + physical_channel.ports.append(dev_port) 
instrument.physical_channels.append(physical_channel) if logical_signal is not None: instrument.connections.append( - Connection( + ChannelMapEntry( logical_signal=logical_signal, physical_channel=physical_channel, ) ) - for i, p in enumerate(local_ports): - if p not in [port.path for port in instrument.ports]: - current_port = Port(path=p, physical_channel=physical_channel) - instrument.ports.append(current_port) - else: - current_port = next( - port for port in instrument.ports if port.path == p - ) - + if "port" in conn: if signal_type_keyword == T_TO: - setup_internal_connections.append( - SetupInternalConnection( - from_instrument=instrument, - from_port=current_port, - to_instrument=instruments_by_uid[remote_path], - ) - ) + if remote_path in instruments_by_uid: + for instr_port in instrument.ports: + if conn["port"] in instr_port.path: + setup_internal_connections.append( + SetupInternalConnection( + from_instrument=instrument, + from_port=instr_port, + to_instrument=instruments_by_uid[ + remote_path + ], + to_port=None, + ) + ) + break servers = {s.uid: s for s, _ in servers} device_setup_constructor_args = { "uid": setup_name, "servers": servers, "instruments": out_instruments, - "logical_signal_groups": logical_signal_groups, + "logical_signal_groups": logical_signal_groups.data, "setup_internal_connections": setup_internal_connections, } - return Setup(**device_setup_constructor_args) diff --git a/laboneq/implementation/experiment_workflow/experiment_workflow.py b/laboneq/implementation/experiment_workflow/experiment_workflow.py index 6d50b22..cdea5f2 100644 --- a/laboneq/implementation/experiment_workflow/experiment_workflow.py +++ b/laboneq/implementation/experiment_workflow/experiment_workflow.py @@ -1,6 +1,8 @@ # Copyright 2020 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + import copy import logging from typing import Dict @@ -8,8 +10,8 @@ from laboneq.data.data_helper import DataHelper from 
laboneq.data.execution_payload import ExecutionPayload from laboneq.data.experiment_description import Experiment +from laboneq.data.experiment_results import ExperimentResults from laboneq.data.setup_description import Setup -from laboneq.data.setup_description.setup_helper import SetupHelper from laboneq.implementation.experiment_workflow.device_setup_generator import ( DeviceSetupGenerator, ) @@ -72,7 +74,9 @@ def current_experiment(self): """ return copy.deepcopy(self._current_experiment) - def run_current_experiment(self, setup: Setup, signal_mappings: Dict[str, str]): + def run_current_experiment( + self, setup: Setup, signal_mappings: Dict[str, str] + ) -> ExperimentResults: """ Run the current experiment. """ @@ -122,7 +126,7 @@ def device_setup_from_descriptor( self, yaml_text: str, server_host: str = None, - server_port: str = None, + server_port: str | int = None, setup_name: str = None, ) -> Setup: """ @@ -156,10 +160,6 @@ def map_signals(self, signal_mappings: Dict[str, str]): Map experiment signals to logical signals. 
""" self._signal_mappings = {} - logical_signals_by_path = { - ls[1].path: ls[1] - for ls in SetupHelper.flat_logical_signals(self._current_setup) - } _logger.info( f"Mapping signals, experiment signals: {self._current_experiment.signals}" ) @@ -168,5 +168,11 @@ def map_signals(self, signal_mappings: Dict[str, str]): } for k, v in signal_mappings.items(): experiment_signal = experiment_signals_by_uid[k] - logical_signal = logical_signals_by_path[v] - self._signal_mappings[experiment_signal.uid] = logical_signal.path + grp, ls_name = v.split("/") + + if grp in self._current_setup.logical_signal_groups: + if ( + ls_name + in self._current_setup.logical_signal_groups[grp].logical_signals + ): + self._signal_mappings[experiment_signal.uid] = v diff --git a/laboneq/implementation/legacy_adapters/__init.py__ b/laboneq/implementation/legacy_adapters/__init.py__ deleted file mode 100644 index e69de29..0000000 diff --git a/laboneq/implementation/legacy_adapters/converters_calibration/__init__.py b/laboneq/implementation/legacy_adapters/converters_calibration.py similarity index 99% rename from laboneq/implementation/legacy_adapters/converters_calibration/__init__.py rename to laboneq/implementation/legacy_adapters/converters_calibration.py index b684870..7281a32 100644 --- a/laboneq/implementation/legacy_adapters/converters_calibration/__init__.py +++ b/laboneq/implementation/legacy_adapters/converters_calibration.py @@ -42,7 +42,10 @@ # converter functions for data type package 'calibration' # AUTOGENERATED, DO NOT EDIT -from .post_process_calibration import post_process + + +def post_process(source, target): + return target def get_converter_function_calibration(orig): diff --git a/laboneq/implementation/legacy_adapters/converters_calibration/post_process_calibration.py b/laboneq/implementation/legacy_adapters/converters_calibration/post_process_calibration.py deleted file mode 100644 index 9c25299..0000000 --- 
a/laboneq/implementation/legacy_adapters/converters_calibration/post_process_calibration.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2023 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - - -def post_process(source, target): - return target diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_description/__init__.py b/laboneq/implementation/legacy_adapters/converters_experiment_description.py similarity index 97% rename from laboneq/implementation/legacy_adapters/converters_experiment_description/__init__.py rename to laboneq/implementation/legacy_adapters/converters_experiment_description.py index 8b0d4a8..62d214f 100644 --- a/laboneq/implementation/legacy_adapters/converters_experiment_description/__init__.py +++ b/laboneq/implementation/legacy_adapters/converters_experiment_description.py @@ -4,9 +4,6 @@ from typing import Any as AnyDSL -from laboneq.core.types.enums.acquisition_type import ( - AcquisitionType as AcquisitionTypeDSL, -) from laboneq.core.types.enums.averaging_mode import AveragingMode as AveragingModeDSL from laboneq.core.types.enums.execution_type import ExecutionType as ExecutionTypeDSL from laboneq.core.types.enums.repetition_mode import RepetitionMode as RepetitionModeDSL @@ -16,7 +13,6 @@ from laboneq.data.experiment_description import Acquire as AcquireDATA from laboneq.data.experiment_description import AcquireLoopNt as AcquireLoopNtDATA from laboneq.data.experiment_description import AcquireLoopRt as AcquireLoopRtDATA -from laboneq.data.experiment_description import AcquisitionType as AcquisitionTypeDATA from laboneq.data.experiment_description import Any as AnyDATA from laboneq.data.experiment_description import AveragingMode as AveragingModeDATA from laboneq.data.experiment_description import Call as CallDATA @@ -25,13 +21,9 @@ from laboneq.data.experiment_description import ExecutionType as ExecutionTypeDATA from laboneq.data.experiment_description import Experiment as ExperimentDATA from 
laboneq.data.experiment_description import ExperimentSignal as ExperimentSignalDATA -from laboneq.data.experiment_description import ( - LinearSweepParameter as LinearSweepParameterDATA, -) from laboneq.data.experiment_description import Match as MatchDATA from laboneq.data.experiment_description import Operation as OperationDATA from laboneq.data.experiment_description import Optional as OptionalDATA -from laboneq.data.experiment_description import Parameter as ParameterDATA from laboneq.data.experiment_description import PlayPulse as PlayPulseDATA from laboneq.data.experiment_description import Pulse as PulseDATA from laboneq.data.experiment_description import PulseFunctional as PulseFunctionalDATA @@ -48,7 +40,9 @@ SignalCalibration as SignalCalibrationDATA, ) from laboneq.data.experiment_description import Sweep as SweepDATA -from laboneq.data.experiment_description import SweepParameter as SweepParameterDATA +from laboneq.data.parameter import LinearSweepParameter as LinearSweepParameterDATA +from laboneq.data.parameter import Parameter as ParameterDATA +from laboneq.data.parameter import SweepParameter as SweepParameterDATA from laboneq.dsl.calibration.signal_calibration import ( SignalCalibration as SignalCalibrationDSL, ) @@ -111,14 +105,6 @@ def get_converter_function_experiment_description(orig): return converter_function_directory.get(orig) -def convert_AcquisitionType(orig: AcquisitionTypeDSL): - return ( - next(e for e in AcquisitionTypeDATA if e.name == orig.name) - if orig is not None - else None - ) - - def convert_AveragingMode(orig: AveragingModeDSL): return ( next(e for e in AveragingModeDATA if e.name == orig.name) @@ -198,7 +184,7 @@ def convert_AcquireLoopRt(orig: AcquireLoopRtDSL): if orig is None: return None retval = AcquireLoopRtDATA() - retval.acquisition_type = convert_AcquisitionType(orig.acquisition_type) + retval.acquisition_type = orig.acquisition_type retval.averaging_mode = convert_AveragingMode(orig.averaging_mode) retval.count = 
orig.count retval.execution_type = convert_ExecutionType(orig.execution_type) @@ -339,6 +325,7 @@ def convert_Match(orig: MatchDSL): return None retval = MatchDATA() retval.handle = orig.handle + retval.user_register = orig.user_register retval.local = orig.local retval.uid = orig.uid return post_process( diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_results/__init__.py b/laboneq/implementation/legacy_adapters/converters_experiment_results/__init__.py deleted file mode 100644 index 6bdb398..0000000 --- a/laboneq/implementation/legacy_adapters/converters_experiment_results/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2023 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - - -from laboneq.data.experiment_results import AcquiredResult as AcquiredResultDATA -from laboneq.dsl.result.acquired_result import AcquiredResult as AcquiredResultDSL -from laboneq.implementation.legacy_adapters.dynamic_converter import convert_dynamic - -# converter functions for data type package 'experiment_results' -# AUTOGENERATED, DO NOT EDIT -from .post_process_experiment_results import post_process - - -def get_converter_function_experiment_results(orig): - converter_function_directory = { - AcquiredResultDSL: convert_AcquiredResult, - } - return converter_function_directory.get(orig) - - -def convert_AcquiredResult(orig: AcquiredResultDSL): - if orig is None: - return None - retval = AcquiredResultDATA() - retval.axis = convert_dynamic( - orig.axis, - source_type_string="List[Union[ArrayLike, List[ArrayLike]]]", - target_type_string="Any", - orig_is_collection=True, - conversion_function_lookup=get_converter_function_experiment_results, - ) - retval.axis_name = convert_dynamic( - orig.axis_name, - source_type_string="List[Union[str, List[str]]]", - target_type_string="List", - orig_is_collection=True, - conversion_function_lookup=get_converter_function_experiment_results, - ) - retval.data = convert_dynamic( - orig.data, - 
source_type_string="ArrayLike", - target_type_string="ArrayLike", - orig_is_collection=True, - conversion_function_lookup=get_converter_function_experiment_results, - ) - retval.last_nt_step = convert_dynamic( - orig.last_nt_step, - source_type_string="List[int]", - target_type_string="List[int]", - orig_is_collection=True, - conversion_function_lookup=get_converter_function_experiment_results, - ) - return post_process( - orig, - retval, - conversion_function_lookup=get_converter_function_experiment_results, - ) diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_results/post_process_experiment_results.py b/laboneq/implementation/legacy_adapters/converters_experiment_results/post_process_experiment_results.py deleted file mode 100644 index 9c25299..0000000 --- a/laboneq/implementation/legacy_adapters/converters_experiment_results/post_process_experiment_results.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright 2023 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - - -def post_process(source, target): - return target diff --git a/laboneq/implementation/legacy_adapters/converters_setup_description/__init__.py b/laboneq/implementation/legacy_adapters/converters_setup_description.py similarity index 93% rename from laboneq/implementation/legacy_adapters/converters_setup_description/__init__.py rename to laboneq/implementation/legacy_adapters/converters_setup_description.py index a43030d..1c120e9 100644 --- a/laboneq/implementation/legacy_adapters/converters_setup_description/__init__.py +++ b/laboneq/implementation/legacy_adapters/converters_setup_description.py @@ -5,13 +5,13 @@ from typing import Any as AnyDSL from laboneq.core.types.enums.io_direction import IODirection as IODirectionDSL -from laboneq.core.types.enums.io_signal_type import IOSignalType as IOSignalTypeDSL from laboneq.core.types.enums.port_mode import PortMode as PortModeDSL from laboneq.core.types.enums.reference_clock_source import ( ReferenceClockSource as 
ReferenceClockSourceDSL, ) +from laboneq.data.calibration import PortMode as PortModeDATA from laboneq.data.setup_description import Any as AnyDATA -from laboneq.data.setup_description import Connection as ConnectionDATA +from laboneq.data.setup_description import ChannelMapEntry as ConnectionDATA from laboneq.data.setup_description import Instrument as HDAWGDATA from laboneq.data.setup_description import Instrument as InstrumentDATA from laboneq.data.setup_description import Instrument as PQSCDATA @@ -19,7 +19,6 @@ from laboneq.data.setup_description import Instrument as SHFSGDATA from laboneq.data.setup_description import Instrument as UHFQADATA from laboneq.data.setup_description import IODirection as IODirectionDATA -from laboneq.data.setup_description import IOSignalType as IOSignalTypeDATA from laboneq.data.setup_description import LogicalSignal as LogicalSignalDATA from laboneq.data.setup_description import LogicalSignalGroup as LogicalSignalGroupDATA from laboneq.data.setup_description import PhysicalChannel as PhysicalChannelDATA @@ -27,7 +26,6 @@ PhysicalChannelType as PhysicalChannelTypeDATA, ) from laboneq.data.setup_description import Port as PortDATA -from laboneq.data.setup_description import PortMode as PortModeDATA from laboneq.data.setup_description import QuantumElement as QuantumElementDATA from laboneq.data.setup_description import Qubit as QubitDATA from laboneq.data.setup_description import ( @@ -57,8 +55,8 @@ from laboneq.dsl.device.ports import Port as PortDSL from laboneq.dsl.device.server import Server as ServerDSL from laboneq.dsl.device.servers.data_server import DataServer as DataServerDSL -from laboneq.dsl.quantum.qubits import QuantumElement as QuantumElementDSL -from laboneq.dsl.quantum.qubits import Qubit as QubitDSL +from laboneq.dsl.quantum.quantum_element import QuantumElement as QuantumElementDSL +from laboneq.dsl.quantum.qubit import Qubit as QubitDSL from laboneq.implementation.legacy_adapters.dynamic_converter import 
convert_dynamic # converter functions for data type package 'setup_description' @@ -96,14 +94,6 @@ def convert_IODirection(orig: IODirectionDSL): ) -def convert_IOSignalType(orig: IOSignalTypeDSL): - return ( - next(e for e in IOSignalTypeDATA if e.name == orig.name) - if orig is not None - else None - ) - - def convert_PhysicalChannelType(orig: PhysicalChannelTypeDSL): return ( next(e for e in PhysicalChannelTypeDATA if e.name == orig.name) @@ -131,7 +121,7 @@ def convert_ReferenceClockSource(orig: ReferenceClockSourceDSL): def convert_Connection(orig: ConnectionDSL): if orig is None: return None - retval = ConnectionDATA() + retval = ConnectionDATA(None, None) return post_process( orig, retval, @@ -239,11 +229,7 @@ def convert_Instrument(orig: InstrumentDSL): def convert_LogicalSignal(orig: LogicalSignalDSL): if orig is None: return None - retval = LogicalSignalDATA() - retval.direction = convert_IODirection(orig.direction) - retval.name = orig.name - retval.path = orig.path - retval.uid = orig.uid + retval = LogicalSignalDATA(name=orig.name, group=orig.uid.split("/")[0]) return post_process( orig, retval, diff --git a/laboneq/dsl/new_arch_support.py b/laboneq/implementation/legacy_adapters/converters_target_setup.py similarity index 99% rename from laboneq/dsl/new_arch_support.py rename to laboneq/implementation/legacy_adapters/converters_target_setup.py index b8fcfc1..fee0f05 100644 --- a/laboneq/dsl/new_arch_support.py +++ b/laboneq/implementation/legacy_adapters/converters_target_setup.py @@ -29,7 +29,7 @@ def convert_dsl_to_target_setup(device_setup: DeviceSetup) -> TargetSetup: server_uid: TargetServer( uid=server_uid, address=server.host, - port=server.port, + port=int(server.port), server_type=ServerType.DATA_SERVER, api_level=server.api_level, ) diff --git a/laboneq/implementation/legacy_adapters/device_setup_converter.py b/laboneq/implementation/legacy_adapters/device_setup_converter.py new file mode 100644 index 0000000..a0c3e00 --- /dev/null +++ 
b/laboneq/implementation/legacy_adapters/device_setup_converter.py @@ -0,0 +1,43 @@ +# Copyright 2023 Zurich Instruments AG +# SPDX-License-Identifier: Apache-2.0 + +"""Module to convert LabOne Q DSL structures into other data types.""" +from typing import List + +from laboneq.core.types import enums as legacy_enums +from laboneq.data import setup_description as setup +from laboneq.dsl import device as legacy_device +from laboneq.dsl.device import instruments as legacy_instruments + + +def legacy_instrument_ports(device_type: setup.DeviceType) -> List[legacy_device.Port]: + if device_type == setup.DeviceType.HDAWG: + return legacy_instruments.HDAWG().ports + if device_type == setup.DeviceType.SHFQA: + return legacy_instruments.SHFQA().ports + if device_type == setup.DeviceType.PQSC: + return legacy_instruments.PQSC().ports + if device_type == setup.DeviceType.UHFQA: + return legacy_instruments.UHFQA().ports + if device_type == setup.DeviceType.SHFSG: + return legacy_instruments.SHFSG().ports + if device_type == setup.DeviceType.SHFQC: + return legacy_instruments.SHFSG().ports + legacy_instruments.SHFQA().ports + if device_type == setup.DeviceType.NonQC: + return legacy_instruments.NonQC().ports + raise NotImplementedError("No port converter for ", device_type) + + +def legacy_signal_to_port_type(signal: legacy_enums.IOSignalType) -> setup.PortType: + if signal == legacy_enums.IOSignalType.DIO: + return setup.PortType.DIO + elif signal == legacy_enums.IOSignalType.ZSYNC: + return setup.PortType.ZSYNC + else: + return setup.PortType.RF + + +def convert_instrument_port(legacy: legacy_device.Port) -> setup.Port: + return setup.Port( + path=legacy.uid, type=legacy_signal_to_port_type(legacy.signal_type) + ) diff --git a/laboneq/implementation/legacy_adapters/legacy_dsl_adapters/__init__.py b/laboneq/implementation/legacy_adapters/legacy_dsl_adapters.py similarity index 96% rename from laboneq/implementation/legacy_adapters/legacy_dsl_adapters/__init__.py rename to 
laboneq/implementation/legacy_adapters/legacy_dsl_adapters.py index 82d8051..d1b2a29 100644 --- a/laboneq/implementation/legacy_adapters/legacy_dsl_adapters/__init__.py +++ b/laboneq/implementation/legacy_adapters/legacy_dsl_adapters.py @@ -1,7 +1,6 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 -import laboneq.dsl.experiment.pulse_library from laboneq.application_management.application_manager import ApplicationManager from laboneq.data.execution_payload import ExecutionPayload from laboneq.data.experiment_description import ( @@ -9,7 +8,6 @@ ExecutionType, Experiment, PlayPulse, - PulseFunctional, Section, Sweep, ) @@ -137,7 +135,9 @@ def map_signal(self, experiment_signal_uid: str, logical_signal): raise ValueError( "Signal {} not found in experiment".format(experiment_signal_uid) ) - self.signal_mappings[experiment_signal_uid] = logical_signal.path + self.signal_mappings[experiment_signal_uid] = ( + logical_signal.group + "/" + logical_signal.name + ) def sweep(self, uid=None, parameter=None): section = Sweep(uid=uid, parameters=[parameter]) @@ -268,11 +268,3 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): self.exp._pop_and_add_section() - - -class pulse_library: - @staticmethod - def const(uid=None, length=None, amplitude=None): - return PulseFunctional( - uid=uid, length=length, amplitude=amplitude, function="const" - ) diff --git a/laboneq/implementation/legacy_adapters/converters_experiment_description/post_process_experiment_description.py b/laboneq/implementation/legacy_adapters/post_process_experiment_description.py similarity index 100% rename from laboneq/implementation/legacy_adapters/converters_experiment_description/post_process_experiment_description.py rename to laboneq/implementation/legacy_adapters/post_process_experiment_description.py diff --git a/laboneq/implementation/legacy_adapters/converters_setup_description/post_process_setup_description.py 
b/laboneq/implementation/legacy_adapters/post_process_setup_description.py similarity index 88% rename from laboneq/implementation/legacy_adapters/converters_setup_description/post_process_setup_description.py rename to laboneq/implementation/legacy_adapters/post_process_setup_description.py index 8b5aaa0..314d62d 100644 --- a/laboneq/implementation/legacy_adapters/converters_setup_description/post_process_setup_description.py +++ b/laboneq/implementation/legacy_adapters/post_process_setup_description.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from laboneq.data.setup_description import ( - Connection, + ChannelMapEntry, DeviceType, LogicalSignalGroup, PhysicalChannel, @@ -38,9 +38,7 @@ def post_process(source, target, conversion_function_lookup): post_process_setup(source, target) if type(target) == LogicalSignalGroup: - target.logical_signals = { - ls.uid.split("/")[1]: ls for ls in target.logical_signals - } + target.logical_signals = {ls.group: ls for ls in target.logical_signals} return target @@ -84,7 +82,7 @@ def post_process_setup(dsl_setup, data_setup): all_ls = {} for lsg in data_setup.logical_signal_groups: for ls in lsg.logical_signals.values(): - all_ls[ls.path] = ls + all_ls[ls.group + "/" + ls.name] = ls for i in data_setup.instruments: server_uid = i.server @@ -93,7 +91,7 @@ def post_process_setup(dsl_setup, data_setup): i.physical_channels = [ PhysicalChannel( - uid=pc.name, + name=pc.name, type=PhysicalChannelType.IQ_CHANNEL if pc.type == PhysicalChannelTypeDSL.IQ_CHANNEL else PhysicalChannelType.RF_CHANNEL, @@ -123,17 +121,23 @@ def post_process_setup(dsl_setup, data_setup): ( pc for pc in i.physical_channels - if pc.uid == pc_of_connection.name + if pc.name == pc_of_connection.name ), None, ) + from laboneq.implementation.legacy_adapters import ( + device_setup_converter as converter, + ) - current_port = Port(path=c.local_port, physical_channel=pc_of_connection) + current_port = Port( + path=c.local_port, + 
type=converter.legacy_signal_to_port_type(c.signal_type), + ) i.ports.append(current_port) if c.remote_path in all_ls and pc_of_connection is not None: i.connections.append( - Connection( + ChannelMapEntry( physical_channel=pc_of_connection, logical_signal=all_ls[c.remote_path], ) @@ -144,6 +148,7 @@ def post_process_setup(dsl_setup, data_setup): from_instrument=i, to_instrument=data_instrument_map[c.remote_path], from_port=current_port, + to_port=None, ) ) diff --git a/laboneq/implementation/legacy_adapters/simple2.py b/laboneq/implementation/legacy_adapters/simple2.py deleted file mode 100644 index 76b8ac2..0000000 --- a/laboneq/implementation/legacy_adapters/simple2.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2022 Zurich Instruments AG -# SPDX-License-Identifier: Apache-2.0 - -# ruff: noqa -""" -Convenience header for the LabOne Q project. -""" - -from laboneq.data.calibration import ( - CarrierType, - MixerCalibration, - ModulationType, - Oscillator, -) -from laboneq.data.experiment_description import ( - AcquisitionType, - AveragingMode, - ExperimentSignal, - LinearSweepParameter, -) -from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( - DeviceSetupAdapter as DeviceSetup, -) -from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( - ExperimentAdapter as Experiment, -) -from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( - LegacySessionAdapter as Session, -) -from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import ( - SignalCalibrationAdapter as SignalCalibration, -) -from laboneq.implementation.legacy_adapters.legacy_dsl_adapters import pulse_library diff --git a/laboneq/controller/recipe_1_4_0.py b/laboneq/implementation/payload_builder/convert_from_legacy_json_recipe.py similarity index 59% rename from laboneq/controller/recipe_1_4_0.py rename to laboneq/implementation/payload_builder/convert_from_legacy_json_recipe.py index cd0594f..e30c3d8 100644 --- 
a/laboneq/controller/recipe_1_4_0.py +++ b/laboneq/implementation/payload_builder/convert_from_legacy_json_recipe.py @@ -3,25 +3,40 @@ from __future__ import annotations -from dataclasses import dataclass, field -from typing import Any +from dataclasses import dataclass +from typing import Any, cast from marshmallow import EXCLUDE, Schema, fields, post_load -from .recipe_enums import ( - AcquisitionType, - NtStepKey, - RefClkType, - SignalType, - TriggeringMode, -) -from .util import LabOneQControllerException +from laboneq.data import recipe + + +@dataclass +class JsonRecipe: + line_endings: str + experiment: recipe.Recipe + servers: list[Any] | None = None + devices: list[Any] | None = None + + +explicit_mapping = { + "JsonRecipeLoader": JsonRecipe, + "Experiment": recipe.Recipe, + "Device": dict, +} + + +def convert_from_legacy_json_recipe(legacy_recipe: dict) -> recipe.Recipe: + return cast(JsonRecipe, JsonRecipeLoader().load(legacy_recipe)).experiment class QCCSSchema(Schema): @post_load def from_json(self, data, **kwargs): - return self.Data(**data) + cls = explicit_mapping.get(self.__class__.__name__) + if cls is None: + cls = getattr(recipe, self.__class__.__name__) + return cls(**data) class Server(QCCSSchema): @@ -29,13 +44,6 @@ class Meta: fields = ("server_uid", "host", "port", "api_level") ordered = True - @dataclass - class Data: - server_uid: str - host: str - port: int - api_level: int - server_uid = fields.Str() host = fields.Str() port = fields.Integer() @@ -47,11 +55,6 @@ class Meta: fields = ("parameter_name", "value") ordered = False - @dataclass - class Data: - key: str - value: str - key = fields.Str(required=True) value = fields.Str(required=True) @@ -62,33 +65,6 @@ class Meta: ordered = True unknown = EXCLUDE - @dataclass - class Data: - device_uid: str - driver: str - options: list[DriverOption] | None = None - - def _get_option(self, key): - for option in self.options: - if key == option["parameter_name"]: - return option["value"] - 
return None - - @property - def serial(self): - return self._get_option("serial") - - @property - def server_uid(self): - return self._get_option("server_uid") - - def __str__(self): - serial = self.serial - if serial is not None: - return f"{self.driver}:{serial}" - else: - return self.driver - device_uid = fields.Str() driver = fields.Str() options = fields.List(DriverOption(), required=False) @@ -99,11 +75,6 @@ class Meta: fields = ("diagonal", "off_diagonal") ordered = True - @dataclass - class Data: - diagonal: float - off_diagonal: float - diagonal = fields.Float() off_diagonal = fields.Float() @@ -131,31 +102,11 @@ class Meta: ) ordered = True - @dataclass - class Data: - channel: int - enable: bool | None = None - modulation: bool | None = None - oscillator: int | None = None - oscillator_frequency: int | None = None - offset: float | None = None - gains: Gains | None = None - range: float | None = None - range_unit: str | None = None - precompensation: dict[str, dict] | None = None - lo_frequency: Any | None = None - port_mode: str | None = None - port_delay: Any | None = None - scheduler_port_delay: float = 0.0 - delay_signal: float | None = None - marker_mode: str | None = None - amplitude: Any | None = None - channel = fields.Integer() enable = fields.Boolean(required=False) modulation = fields.Boolean(required=False) oscillator = fields.Integer(required=False) - oscillator_frequency = fields.Integer(required=False) + oscillator_frequency = fields.Float(required=False) offset = fields.Float(required=False) gains = fields.Nested(Gains, required=False) range = fields.Float(required=False) @@ -175,7 +126,7 @@ def _serialize(self, value, attr, obj, **kwargs): return value.name def _deserialize(self, value, attr, data, **kwargs): - return SignalType[value.upper()] + return recipe.SignalType[value.upper()] class AWG(QCCSSchema): @@ -189,14 +140,6 @@ class Meta: ) ordered = False - @dataclass - class Data: - awg: int - signal_type: SignalType = 
SignalType.SINGLE - qa_signal_id: str | None = None - command_table_match_offset: int | None = None - feedback_register: int | None = None - awg = fields.Integer() signal_type = SignalTypeField() qa_signal_id = fields.Str(required=False, allow_none=True) @@ -209,11 +152,6 @@ class Meta: fields = ("port", "device_uid") ordered = True - @dataclass - class Data: - port: int - device_uid: str - port = fields.Integer() device_uid = fields.Str() @@ -222,11 +160,6 @@ class Measurement(QCCSSchema): class Meta: fields = ("length", "channel") - @dataclass - class Data: - length: int - channel: int = 0 - length = fields.Integer() channel = fields.Integer() @@ -237,13 +170,9 @@ def _serialize(self, value, attr, obj, **kwargs): def _deserialize(self, value, attr, data, **kwargs): if value == 10e6: - return RefClkType._10MHZ.value - elif value == 100e6: - return RefClkType._100MHZ.value + return recipe.RefClkType._10MHZ.value else: - raise LabOneQControllerException( - f"UNsupported reference clock value {value}" - ) + return recipe.RefClkType._100MHZ.value class TriggeringModeField(fields.Field): @@ -251,23 +180,7 @@ def _serialize(self, value, attr, obj, **kwargs): return value.name def _deserialize(self, value, attr, data, **kwargs): - return TriggeringMode[value.upper()] - - -class AcquisitionTypeField(fields.Field): - def __init__(self, *args, **kwargs) -> None: - kwargs["allow_none"] = True - super().__init__(*args, **kwargs) - - def _serialize(self, value, attr, obj, **kwargs): - if value is None: - return None - return value.name - - def _deserialize(self, value, attr, data, **kwargs): - if value is None: - return None - return AcquisitionType[value.upper()] + return recipe.TriggeringMode[value.upper()] class Config(QCCSSchema): @@ -281,14 +194,6 @@ class Meta: ) ordered = True - @dataclass - class Data: - repetitions: int = 1 - reference_clock: RefClkType = None - holdoff: float = 0 - triggering_mode: TriggeringMode = TriggeringMode.DIO_FOLLOWER - sampling_rate: float | 
None = None - repetitions = fields.Int() reference_clock = RefClkTypeField() holdoff = fields.Float() @@ -309,16 +214,6 @@ class Meta: ) ordered = True - @dataclass - class Data: - device_uid: str - config: Config.Data - awgs: list[AWG.Data] = None - outputs: list[IO.Data] = None - inputs: list[IO.Data] = None - measurements: list[Measurement.Data] = field(default_factory=list) - ppchannels: list[dict[str, Any]] | None = None - device_uid = fields.Str() config = fields.Nested(Config) awgs = fields.List(fields.Nested(AWG), required=False) @@ -333,14 +228,6 @@ class Meta: fields = ("id", "device_id", "channel", "frequency", "param") ordered = True - @dataclass - class Data: - id: str - device_id: str - channel: int - frequency: float = None - param: str = None - id = fields.Str() device_id = fields.Str() channel = fields.Int() @@ -353,15 +240,6 @@ class Meta: fields = ("signal_id", "device_id", "awg", "channels", "weights", "threshold") ordered = True - @dataclass - class Data: - signal_id: str - device_id: str - awg: int - channels: list[int] - weights: str = None - threshold: float = 0.0 - signal_id = fields.Str() device_id = fields.Str() awg = fields.Int() @@ -375,12 +253,6 @@ class Meta: fields = ("section_id", "signal_id", "acquire_length") ordered = True - @dataclass - class Data: - section_id: str - signal_id: str - acquire_length: int - section_id = fields.Str() signal_id = fields.Str() acquire_length = fields.Int() @@ -388,7 +260,7 @@ class Data: class NtStepKeyField(fields.Field): def _deserialize(self, value, attr, data, **kwargs): - return NtStepKey(indices=tuple(value["indices"])) + return recipe.NtStepKey(indices=tuple(value["indices"])) class RealtimeExecutionInit(QCCSSchema): @@ -402,14 +274,6 @@ class Meta: ) ordered = True - @dataclass - class Data: - device_id: str - awg_id: int - seqc_ref: str - wave_indices_ref: str - nt_step: NtStepKey - device_id = fields.Str() awg_id = fields.Int() seqc_ref = fields.Str() @@ -428,23 +292,9 @@ class Meta: 
"simultaneous_acquires", "total_execution_time", "max_step_execution_time", - "acquisition_type", ) ordered = True - - @dataclass - class Data: - initializations: list[Initialization.Data] - realtime_execution_init: list[RealtimeExecutionInit.Data] - oscillator_params: list[OscillatorParam.Data] = field(default_factory=list) - integrator_allocations: list[IntegratorAllocation.Data] = field( - default_factory=list - ) - acquire_lengths: list[AcquireLength.Data] = field(default_factory=list) - simultaneous_acquires: list[dict[str, str]] = field(default_factory=list) - total_execution_time: float = None - max_step_execution_time: float = None - acquisition_type: AcquisitionType = AcquisitionTypeField() + unknown = EXCLUDE initializations = fields.List(fields.Nested(Initialization)) realtime_execution_init = fields.List(fields.Nested(RealtimeExecutionInit)) @@ -458,22 +308,14 @@ class Data: ) total_execution_time = fields.Float(required=False, allow_none=True) max_step_execution_time = fields.Float(required=False, allow_none=True) - acquisition_type = AcquisitionTypeField() -class Recipe(QCCSSchema): +class JsonRecipeLoader(QCCSSchema): class Meta: unknown = EXCLUDE fields = ("line_endings", "experiment", "servers", "devices") ordered = False - @dataclass - class Data: - line_endings: str - experiment: Experiment.Data - servers: list[Server.Data] | None = None - devices: list[Device.Data] | None = None - line_endings = fields.Str() experiment = fields.Nested(Experiment) servers = fields.List(fields.Nested(Server), required=False) diff --git a/laboneq/implementation/payload_builder/payload_builder.py b/laboneq/implementation/payload_builder/payload_builder.py index 85a79c3..c2078ff 100644 --- a/laboneq/implementation/payload_builder/payload_builder.py +++ b/laboneq/implementation/payload_builder/payload_builder.py @@ -4,9 +4,8 @@ import copy import logging import uuid -from collections import Counter from dataclasses import dataclass, field -from typing import 
TYPE_CHECKING, Any, Dict, Iterator, List, Union +from typing import TYPE_CHECKING, Any, Iterator from laboneq.data.compilation_job import ( CompilationJob, @@ -21,15 +20,6 @@ ) from laboneq.data.execution_payload import ( ExecutionPayload, - Initialization, - InitializationConfiguration, - LoopType, - NearTimeOperation, - NearTimeOperationType, - NearTimeProgram, - NtStepKey, - RealTimeExecutionInit, - Recipe, ServerType, TargetChannelCalibration, TargetChannelType, @@ -39,25 +29,14 @@ TargetSetup, ) from laboneq.data.experiment_description import ( - Acquire, - AcquireLoopNt, - AcquireLoopRt, - Call, Delay, - ExecutionType, Experiment, - Operation, PlayPulse, Pulse, PulseFunctional, Reserve, Section, - Set, - Sweep, - SweepParameter, ) -from laboneq.data.experiment_description.experiment_helper import ExperimentHelper -from laboneq.data.scheduled_experiment import ScheduledExperiment from laboneq.data.setup_description import ( DeviceType, Instrument, @@ -66,6 +45,9 @@ ) from laboneq.data.setup_description.setup_helper import SetupHelper from laboneq.dsl.calibration.signal_calibration import SignalCalibration +from laboneq.implementation.payload_builder.convert_from_legacy_json_recipe import ( + convert_from_legacy_json_recipe, +) from laboneq.interfaces.compilation_service.compilation_service_api import ( CompilationServiceAPI, ) @@ -81,8 +63,8 @@ class GlobalSetupProperties: global_leader: str = None is_desktop_setup: bool = False - internal_followers: List[str] = field(default_factory=list) - clock_settings: Dict[str, Any] = field(default_factory=dict) + internal_followers: list[str] = field(default_factory=list) + clock_settings: dict[str, Any] = field(default_factory=dict) class PayloadBuilder(PayloadBuilderAPI): @@ -145,19 +127,17 @@ def connected_outputs(i: Instrument) -> dict[str, list[int]]: ls_ports: dict[str, list[int]] = {} for c in i.connections: ports: list[int] = [] - for port in i.ports: + for port in c.physical_channel.ports: if ( - 
port.physical_channel - and port.physical_channel.uid == c.physical_channel.uid + port.path.startswith("SIGOUTS") + or port.path.startswith("SGCHANNELS") + or port.path.startswith("QACHANNELS") ): - if ( - port.path.startswith("SIGOUTS") - or port.path.startswith("SGCHANNELS") - or port.path.startswith("QACHANNELS") - ): - ports.append(int(port.path.split("/")[1])) + ports.append(int(port.path.split("/")[1])) if ports: - ls_ports.setdefault(c.logical_signal.path, []).extend(ports) + ls_ports.setdefault( + c.logical_signal.group + "/" + c.logical_signal.name, [] + ).extend(ports) return ls_ports target_setup.devices = [ @@ -185,7 +165,7 @@ def build_payload( self, device_setup: Setup, experiment: Experiment, - signal_mappings: Dict[str, str], + signal_mappings: dict[str, str], ) -> ExecutionPayload: """ Compose an experiment from a setup descriptor and an experiment descriptor. @@ -201,250 +181,29 @@ def build_payload( job = CompilationJob(experiment_info=experiment_info) job_id = self._compilation_service.submit_compilation_job(job) - scheduled_experiment: ScheduledExperiment = ( - self._compilation_service.compilation_job_result(job_id) - ) - - target_setup = self.convert_to_target_setup(device_setup) - - device_dict = {d.uid: d for d in target_setup.devices} - - target_recipe = Recipe() - - global_setup_properties = self._analyze_setup(device_setup, experiment_info) - - config_dict = self._calc_config(global_setup_properties) - - for k, v in self._analyze_dio(device_setup, global_setup_properties).items(): - if k not in config_dict: - config_dict[k] = {} - config_dict[k]["triggering_mode"] = v - - def build_config(init): - device_uid = init["device_uid"] - if device_uid in config_dict: - config = config_dict[device_uid] - return InitializationConfiguration( - reference_clock=10e6, # FIXME: hardcoded - triggering_mode=config.get("triggering_mode"), - ) - - return InitializationConfiguration( - reference_clock=10e6, # FIXME: hardcoded - ) - - def 
_find_initialization(recipe, instrument_uid): - for init in recipe["experiment"]["initializations"]: - if init["device_uid"] == instrument_uid: - return init - return None - - for srv in device_setup.servers.values(): - if srv.leader_uid is not None: - init = _find_initialization(scheduled_experiment.recipe, srv.leader_uid) - if init is not None: - init["config"]["repetitions"] = 1 - init["config"]["holdoff"] = 0 - - # adapt initializations to consider setup internal connections - - _logger.info( - f"initializations: {scheduled_experiment.recipe['experiment']['initializations']}" - ) - target_recipe.initializations = [ - Initialization( - device=device_dict[i["device_uid"]], - config=build_config(i), - ) - for i in scheduled_experiment.recipe["experiment"]["initializations"] - ] - _logger.info(f"Built initializations: {target_recipe.initializations}") - - target_recipe.realtime_execution_init = [ - RealTimeExecutionInit( - device=next(d for d in target_setup.devices if d.uid == i["device_id"]), - awg_id=i["awg_id"], - seqc=i["seqc_ref"], # todo: create SourceCode object - wave_indices_ref=i["wave_indices_ref"], - nt_step=NtStepKey(**i["nt_step"]), + scheduled_experiment = self._compilation_service.compilation_job_result(job_id) + if isinstance(scheduled_experiment.recipe, dict): + scheduled_experiment.recipe = convert_from_legacy_json_recipe( + scheduled_experiment.recipe ) - for i in scheduled_experiment.recipe["experiment"][ - "realtime_execution_init" - ] - ] - ntp = NearTimeProgramFactory().make(experiment) - _logger.info(f"Built NearTimeProgram: {ntp}") + target_setup = self.convert_to_target_setup(device_setup) run_job = ExecutionPayload( uid=uuid.uuid4().hex, target_setup=target_setup, compiled_experiment_hash=scheduled_experiment.uid, - recipe=target_recipe, - near_time_program=ntp, src=scheduled_experiment.src, # todo: create SourceCode object scheduled_experiment=scheduled_experiment, ) return run_job - def _calc_config( - self, global_setup_properties: 
GlobalSetupProperties - ) -> Dict[str, Any]: - retval = {} - if global_setup_properties.global_leader is not None: - retval[global_setup_properties.global_leader.uid] = { - "config": { - "repetitions": 1, - "holdoff": 0, - } - } - if global_setup_properties.is_desktop_setup: - retval[global_setup_properties.global_leader.uid]["config"][ - "triggering_mode" - ] = "desktop_leader" - - if global_setup_properties.is_desktop_setup: - # Internal followers are followers on the same device as the leader. This - # is necessary for the standalone SHFQC, where the SHFSG part does neither - # appear in the PQSC device connections nor the DIO connections. - for f in global_setup_properties.internal_followers: - if f.uid not in retval: - retval[f.uid] = {"config": {}} - retval[f.uid]["config"]["triggering_mode"] = "dio_follower" - - return retval - - def _analyze_dio( - self, device_setup: Setup, global_setup_properties: GlobalSetupProperties - ): - retval = {} - for sic in device_setup.setup_internal_connections: - if sic.from_port.path.startswith("DIOS"): - if global_setup_properties.is_desktop_setup: - retval[sic.to_instrument.uid] = "desktop_dio_follower" - else: - retval[sic.to_instrument.uid] = "dio_follower" - - if sic.from_port.path.startswith("ZSYNCS"): - retval[sic.from_instrument.uid] = "zsync_follower" - - return retval - - def _analyze_setup( - self, device_setup: Setup, experiment_info: ExperimentInfo - ) -> GlobalSetupProperties: - retval = GlobalSetupProperties() - - def get_first_instr_of(device_infos: List[DeviceInfo], type) -> DeviceInfo: - return next((instr for instr in device_infos if instr.device_type == type)) - - device_info_dict: Dict[str, DeviceInfo] = {} - for signal in experiment_info.signals: - device_info_dict[signal.device.uid] = signal.device - - device_type_list = [i.device_type for i in device_info_dict.values()] - type_counter = Counter(device_type_list) - has_pqsc = type_counter[DeviceInfoType.PQSC] > 0 - has_hdawg = 
type_counter[DeviceInfoType.HDAWG] > 0 - has_shfsg = type_counter[DeviceInfoType.SHFSG] > 0 - has_shfqa = type_counter[DeviceInfoType.SHFQA] > 0 - shf_types = {DeviceInfoType.SHFQA, DeviceInfoType.SHFQC, DeviceInfoType.SHFSG} - has_shf = bool(shf_types.intersection(set(device_type_list))) - - # Basic validity checks - signal_infos = experiment_info.signals - - used_devices = set(info.device.device_type for info in signal_infos) - - def get_instrument_by_uid(uid) -> Instrument: - return next((i for i in device_setup.instruments if i.uid == uid), None) - - used_device_serials = set( - get_instrument_by_uid(info.device.uid).address for info in signal_infos - ) - if ( - DeviceInfoType.HDAWG in used_devices - and DeviceInfoType.UHFQA in used_devices - and bool(shf_types.intersection(used_devices)) - ): - raise RuntimeError( - "Setups with signals on each of HDAWG, UHFQA and SHF type " - + "instruments are not supported" - ) - - retval.is_desktop_setup = not has_pqsc and ( - used_devices == {DeviceInfoType.HDAWG} - or used_devices == {DeviceInfoType.SHFSG} - or used_devices == {DeviceInfoType.SHFQA} - or used_devices == {DeviceInfoType.SHFQA, DeviceInfoType.SHFSG} - and len(used_device_serials) == 1 # SHFQC - or used_devices == {DeviceInfoType.HDAWG, DeviceInfoType.UHFQA} - or ( - used_devices == {DeviceInfoType.UHFQA} and has_hdawg - ) # No signal on leader - ) - if ( - not has_pqsc - and not retval.is_desktop_setup - and used_devices != {DeviceInfoType.UHFQA} - and bool(used_devices) # Allow empty experiment (used in tests) - ): - raise RuntimeError( - f"Unsupported device combination {used_devices} for small setup" - ) - - leader = experiment_info.global_leader_device - device_infos = list(device_info_dict.values()) - if retval.is_desktop_setup: - if leader is None: - if has_hdawg: - leader = get_first_instr_of(device_infos, DeviceInfoType.HDAWG) - elif has_shfqa: - leader = get_first_instr_of(device_infos, DeviceInfoType.SHFQA) - if has_shfsg: # SHFQC - 
retval.internal_followers = [ - get_first_instr_of(device_infos, DeviceInfoType.SHFSG) - ] - elif has_shfsg: - leader = get_first_instr_of(device_infos, DeviceInfoType.SHFSG) - - _logger.debug("Using desktop setup configuration with leader %s", leader) - - if has_hdawg or has_shfsg and not has_shfqa: - _logger.warning( - "Not analyzing if awg 0 of leader is used. Triggering may fail." - ) - # TODO: Check if awg 0 of leader is used, and add dummy signal if not - - has_qa = type_counter[DeviceInfoType.SHFQA] > 0 or type_counter["uhfqa"] > 0 - is_hdawg_solo = ( - type_counter[DeviceInfoType.HDAWG] == 1 and not has_shf and not has_qa - ) - if is_hdawg_solo: - first_hdawg = get_first_instr_of(device_infos, DeviceInfoType.HDAWG) - if first_hdawg.reference_clock_source is None: - retval.clock_settings[first_hdawg.uid] = "internal" - else: - if not has_hdawg and has_shfsg: # SHFSG or SHFQC solo - first_shfsg = get_first_instr_of(device_infos, DeviceInfoType.SHFSG) - if first_shfsg.reference_clock_source is None: - retval.clock_settings[first_shfsg.uid] = "internal" - if not has_hdawg and has_shfqa: # SHFQA or SHFQC solo - first_shfqa = get_first_instr_of(device_infos, DeviceInfoType.SHFQA) - if first_shfqa.reference_clock_source is None: - retval.clock_settings[first_shfqa.uid] = "internal" - - retval.use_2GHz_for_HDAWG = has_shf - retval.global_leader = leader - - return retval - @classmethod def extract_experiment_info( cls, exp: Experiment, setup: Setup, - signal_mappings: Dict[str, str], + signal_mappings: dict[str, str], ) -> ExperimentInfo: experiment_info = ExperimentInfo() experiment_info.uid = exp.uid @@ -536,7 +295,6 @@ def extract_section_info( section=section_info, signal=signal, pulse_def=pulse_def, - uid=uuid.uuid4().hex, ) ) elif isinstance(child, Delay): @@ -546,15 +304,12 @@ def extract_section_info( section=section_info, signal=signal, delay=child.delay, - uid=uuid.uuid4().hex, ) ) elif isinstance(child, Reserve): signal = next(s for s in signals if s.uid 
== child.signal) section_signal_pulses.append( - SectionSignalPulse( - section=section_info, signal=signal, uid=uuid.uuid4().hex - ) + SectionSignalPulse(section=section_info, signal=signal) ) return section_info @@ -596,143 +351,3 @@ def extract_pulse_info(cls, pulse: Pulse) -> PulseDef: pulse_def.amplitude = pulse.amplitude return pulse_def - - @classmethod - def convert_experiment_to_near_time_program( - cls, experiment: Experiment - ) -> NearTimeProgram: - ntp = NearTimeProgram() - ntp.uid = experiment.uid - - return ntp - - -class NearTimeProgramFactory: - def __init__(self): - self._near_time_program = NearTimeProgram() - self._current_scope = self._near_time_program - - def make(self, experiment: Experiment) -> NearTimeProgram: - self._handle_children(experiment.sections, experiment.uid) - return self._near_time_program - - def _append_statement(self, statement: NearTimeOperation): - self._current_scope.children.append(statement) - - def _sub_scope(self, generator, *args): - new_scope = NearTimeOperation() - saved_scope = self._current_scope - self._current_scope = new_scope - generator(*args) - self._current_scope = saved_scope - return new_scope - - def _handle_children( - self, children: List[Union[Operation, Section]], parent_uid: str - ): - for child in children: - if isinstance(child, Operation): - self._append_statement( - self._statement_from_operation(child, parent_uid) - ) - elif isinstance(child, AcquireLoopNt): - loop_body = self._sub_scope( - self._handle_children, child.children, child.uid - ) - self._append_statement( - NearTimeOperation( - operation_type=NearTimeOperationType.FOR_LOOP, - children=[loop_body], - args={ - "count": child.count, - "loop_type": LoopType.SWEEP, - }, - ) - ) - elif isinstance(child, AcquireLoopRt): - loop_body = self._sub_scope( - self._handle_children, child.children, child.uid - ) - self._append_statement( - NearTimeOperation( - operation_type=NearTimeOperationType.ACQUIRE_LOOP_RT, - children=[loop_body], - args={ 
- "count": child.count, - "uid": child.uid, - "averaging_mode": str(child.averaging_mode), - "acquisition_type": str(child.acquisition_type), - }, - ) - ) - elif isinstance(child, Sweep): - values = ExperimentHelper.get_parameter_values(child.parameters[0]) - count = len(values) - loop_body = self._sub_scope(self._handle_sweep, child) - loop_type = ( - LoopType.HARDWARE - if child.execution_type == ExecutionType.REAL_TIME - else LoopType.SWEEP - ) - self._append_statement( - NearTimeOperation( - operation_type=NearTimeOperationType.FOR_LOOP, - children=[loop_body], - args={ - "count": count, - "loop_type": loop_type, - }, - ) - ) - else: - sub_sequence = self._sub_scope( - self._handle_children, child.children, child.uid - ) - self._append_statement(sub_sequence) - - def _handle_sweep(self, sweep: Sweep): - for parameter in sweep.parameters: - self._append_statement(self._statement_from_param(parameter)) - self._handle_children(sweep.children, sweep.uid) - - def _statement_from_param(self, parameter: SweepParameter): - return NearTimeOperation( - operation_type=NearTimeOperationType.SET_SOFTWARE_PARM, - args={ - "parameter_uid": parameter.uid, - "values": ExperimentHelper.get_parameter_values(parameter), - "axis_name": parameter.axis_name, - }, - ) - - def _statement_from_operation(self, operation, parent_uid: str): - if isinstance(operation, Call): - return NearTimeOperation( - operation_type=NearTimeOperationType.CALL, - args={"func_name": operation.func_name, "args": operation.args}, - ) - if isinstance(operation, Set): - return NearTimeOperation( - operation_type=NearTimeOperationType.SET, - args={"signal_uid=": operation.signal_uid, "value": operation.value}, - ) - if isinstance(operation, Acquire): - return NearTimeOperation( - operation_type=NearTimeOperationType.ACQUIRE, - args={ - "handle": operation.handle, - "signal": operation.signal, - "parent_uid": parent_uid, - }, - ) - if isinstance(operation, Delay): - return NearTimeOperation( - 
operation_type=NearTimeOperationType.DELAY, - ) - if isinstance(operation, Reserve): - return NearTimeOperation(operation_type=NearTimeOperationType.RESERVE) - if isinstance(operation, PlayPulse): - return NearTimeOperation( - operation_type=NearTimeOperationType.PLAY_PULSE, - ) - return NearTimeOperation(operation_type=NearTimeOperationType.NO_OPERATION) diff --git a/laboneq/implementation/runner/runner_legacy.py b/laboneq/implementation/runner/runner_legacy.py index d257f17..8a2064c 100644 --- a/laboneq/implementation/runner/runner_legacy.py +++ b/laboneq/implementation/runner/runner_legacy.py @@ -5,16 +5,8 @@ import time from laboneq import controller as ctrl -from laboneq.data.execution_payload import ( - ExecutionPayload, - LoopType, - NearTimeOperation, - NearTimeOperationType, - NearTimeProgram, - TargetSetup, -) +from laboneq.data.execution_payload import ExecutionPayload, TargetSetup from laboneq.data.experiment_results import ExperimentResults -from laboneq.executor import executor from laboneq.interfaces.runner.runner_api import RunnerAPI from laboneq.interfaces.runner.runner_control_api import RunnerControlAPI @@ -105,184 +97,3 @@ def disconnect(self): Disconnect from the setup. 
""" pass - - -class ExecutionFactoryFromNearTimeProgram(executor.ExecutionFactory): - def make(self, near_time_program: NearTimeProgram) -> executor.Statement: - self._handle_children(near_time_program.children, near_time_program.uid) - return self._root_sequence - - @staticmethod - def is_operation(op: NearTimeOperationType): - return op not in ( - NearTimeOperationType.ACQUIRE_LOOP_NT, - NearTimeOperationType.ACQUIRE_LOOP_RT, - NearTimeOperationType.FOR_LOOP, - ) - - @staticmethod - def convert_loop_type(loop_type: LoopType): - return { - LoopType.AVERAGE: executor.LoopType.AVERAGE, - LoopType.SWEEP: executor.LoopType.SWEEP, - LoopType.HARDWARE: executor.LoopType.HARDWARE, - }[loop_type] - - def _handle_children(self, children, parent_uid: str): - for child in children: - if child.operation_type is None: - body = self._sub_scope(self._handle_children, child.children, child.uid) - sequence = executor.Sequence() - sequence.append_statement(body) - if self.is_operation(child.operation_type): - self._append_statement( - self._statement_from_operation(child, parent_uid) - ) - elif child.operation_type == NearTimeOperationType.FOR_LOOP: - loop_body = self._sub_scope( - self._handle_children, child.children, child.uid - ) - self._append_statement( - executor.ForLoop( - child.args["count"], - loop_body, - self.convert_loop_type(child.args["loop_type"]), - ) - ) - elif child.operation_type == NearTimeOperationType.ACQUIRE_LOOP_NT: - loop_body = self._sub_scope( - self._handle_children, child.children, child.uid - ) - self._append_statement( - executor.ExecRT( - count=child.count, - body=loop_body, - uid=child.uid, - averaging_mode=child.averaging_mode, - acquisition_type=child.acquisition_type, - ) - ) - else: - sub_sequence = self._sub_scope( - self._handle_children, child.children, child.uid - ) - self._append_statement(sub_sequence) - - def _handle_sweep(self, sweep: NearTimeOperation): - for parameter in sweep.args["parameters"]: - 
self._append_statement(self._statement_from_param(parameter)) - self._handle_children(sweep.children, sweep.uid) - - def _statement_from_operation(self, operation, parent_uid: str): - if operation.operation_type == NearTimeOperationType.CALL: - return executor.ExecUserCall( - operation.args["func_name"], operation.args["args"] - ) - if operation.operation_type == NearTimeOperationType.SET: - return executor.ExecSet(operation.args["path"], operation.args["value"]) - if operation.operation_type == NearTimeOperationType.PLAY_PULSE: - return executor.Nop() - if operation.operation_type == NearTimeOperationType.DELAY: - return executor.Nop() - if operation.operation_type == NearTimeOperationType.RESERVE: - return executor.Nop() - if operation.operation_type == NearTimeOperationType.ACQUIRE: - return executor.ExecAcquire(operation.handle, operation.signal, parent_uid) - - return executor.Nop() - - -from laboneq.data.execution_payload import ( - ExecutionPayload, - LoopType, - NearTimeOperation, - NearTimeOperationType, - NearTimeProgram, - TargetSetup, -) -from laboneq.data.execution_payload.execution_payload_helper import ( - ExecutionPayloadHelper, -) -from laboneq.executor import executor - - -def convert_loop_type(loop_type: LoopType): - return { - LoopType.AVERAGE: executor.LoopFlags.AVERAGE, - LoopType.SWEEP: executor.LoopFlags.SWEEP, - LoopType.HARDWARE: executor.LoopFlags.HARDWARE, - }[loop_type] - - -def convert(near_time_program: NearTimeProgram): - root_marker = "____ROOT___" - context = {"nodes_by_parent": {}} - - def execution_builder_visitor(operation, context, parent): - if parent is not None: - parent_hash = id(parent) - else: - parent_hash = root_marker - current_node_hash = id(operation) - if parent_hash not in context["nodes_by_parent"]: - context["nodes_by_parent"][parent_hash] = [] - - _logger.debug( - f"Visiting {operation}, context: {context}, current node hash: {current_node_hash}, parent hash: {parent_hash}" - ) - if isinstance(operation, 
NearTimeProgram) or operation.operation_type is None: - sequence = executor.Sequence() - num_chidren = 0 - if current_node_hash in context["nodes_by_parent"]: - num_chidren = len(context["nodes_by_parent"][current_node_hash]) - for child in context["nodes_by_parent"][current_node_hash]: - sequence.append_statement(child) - - _logger.debug(f"Appended {num_chidren} statements to sequence") - context["nodes_by_parent"][parent_hash].append(sequence) - - elif operation.operation_type == NearTimeOperationType.PLAY_PULSE: - context["nodes_by_parent"][parent_hash].append(executor.Nop()) - elif operation.operation_type == NearTimeOperationType.SET: - context["nodes_by_parent"][parent_hash].append( - executor.ExecSet(operation.args["path"], operation.args["value"]) - ) - - elif operation.operation_type == NearTimeOperationType.SET_SOFTWARE_PARM: - param_name = operation.args["parameter_uid"] - values = operation.args["values"] - axis_name = operation.args["axis_name"] - context["nodes_by_parent"][parent_hash].append( - executor.SetSoftwareParam(param_name, values, axis_name) - ) - - elif operation.operation_type == NearTimeOperationType.FOR_LOOP: - loop_body = executor.Sequence() - if current_node_hash in context["nodes_by_parent"]: - for child in context["nodes_by_parent"][current_node_hash]: - loop_body.append_statement(child) - loop = executor.ForLoop( - operation.args["count"], - loop_body, - convert_loop_type(operation.args["loop_type"]), - ) - context["nodes_by_parent"][parent_hash].append(loop) - elif operation.operation_type == NearTimeOperationType.ACQUIRE_LOOP_RT: - loop_body = executor.Sequence() - if current_node_hash in context["nodes_by_parent"]: - for child in context["nodes_by_parent"][current_node_hash]: - loop_body.append_statement(child) - loop = executor.ExecRT( - operation.args["count"], - loop_body, - operation.uid, - averaging_mode=operation.args["averaging_mode"], - acquisition_type=operation.args["acquisition_type"], - ) - 
context["nodes_by_parent"][parent_hash].append(loop) - - ExecutionPayloadHelper.accept_near_time_program_visitor( - near_time_program, execution_builder_visitor, context - ) - _logger.debug(f"Context: {context}") - return context["nodes_by_parent"][root_marker][0] diff --git a/laboneq/interfaces/application_management/laboneq_settings.py b/laboneq/interfaces/application_management/laboneq_settings.py index edb5727..2382998 100644 --- a/laboneq/interfaces/application_management/laboneq_settings.py +++ b/laboneq/interfaces/application_management/laboneq_settings.py @@ -6,7 +6,7 @@ class LabOneQSettings(ABC): """ - LabOneQSettings is an interface for accessing the settings of the LabOneQ application. + LabOneQSettings is an interface for accessing the settings of the LabOne Q application. Note: There are no methods here, this is just a marker interface. A concrete implementation will have a property for each setting. """ diff --git a/laboneq/interfaces/data_storage/data_storage_api.py b/laboneq/interfaces/data_storage/data_storage_api.py index fe31af0..5b625d0 100644 --- a/laboneq/interfaces/data_storage/data_storage_api.py +++ b/laboneq/interfaces/data_storage/data_storage_api.py @@ -26,7 +26,7 @@ def store( self, data: Any, key: str = None, metadata: Optional[Dict[str, Any]] = None ) -> None: """ - Store data in the database. Only data that can be serialized with the L1Q serializer can be stored. + Store data in the database. Only data that can be serialized with the LabOne Q serializer can be stored. Args: key (str): The key to store the data under. 
diff --git a/laboneq/interfaces/runner/runner_api.py b/laboneq/interfaces/runner/runner_api.py index 6ed93bd..09dc5fa 100644 --- a/laboneq/interfaces/runner/runner_api.py +++ b/laboneq/interfaces/runner/runner_api.py @@ -1,27 +1,30 @@ # Copyright 2020 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 -from abc import ABC +from abc import ABC, abstractmethod from laboneq.data.execution_payload import ExecutionPayload from laboneq.data.experiment_results import ExperimentResults class RunnerAPI(ABC): + @abstractmethod def submit_execution_payload(self, job: ExecutionPayload): """ Submit an experiment run job. """ - raise NotImplementedError + pass + @abstractmethod def run_job_status(self, job_id: str): """ Get the status of an experiment run job. """ - raise NotImplementedError + pass + @abstractmethod def run_job_result(self, job_id: str) -> ExperimentResults: """ Get the result of an experiment run job. Blocks until the result is available. """ - raise NotImplementedError + pass diff --git a/laboneq/interfaces/runner/runner_control_api.py b/laboneq/interfaces/runner/runner_control_api.py index 2d99cce..9ae1326 100644 --- a/laboneq/interfaces/runner/runner_control_api.py +++ b/laboneq/interfaces/runner/runner_control_api.py @@ -1,32 +1,36 @@ # Copyright 2023 Zurich Instruments AG # SPDX-License-Identifier: Apache-2.0 -from abc import ABC +from abc import ABC, abstractmethod from laboneq.data.execution_payload import TargetSetup class RunnerControlAPI(ABC): + @abstractmethod def connect(self, setup: TargetSetup, do_emulation: bool = True): """ Connect to the setup """ - raise NotImplementedError + pass + @abstractmethod def start(self): """ Start the experiment runner. It will start processing jobs from the job queue. """ - raise NotImplementedError + pass + @abstractmethod def stop(self): """ Stop the experiment runner. It will stop processing jobs from the job queue. 
""" - raise NotImplementedError + pass + @abstractmethod def disconnect(self): """ Disconnect from the setup. """ - raise NotImplementedError + pass diff --git a/laboneq/openqasm3/gate_store.py b/laboneq/openqasm3/gate_store.py index 128facf..2a46748 100644 --- a/laboneq/openqasm3/gate_store.py +++ b/laboneq/openqasm3/gate_store.py @@ -19,9 +19,9 @@ def lookup_gate( kwargs = kwargs or {} return self.gates[(self.gate_map.get(name, name), qubits)](*args, **kwargs) - def map_gate(self, qasm_name: str, l1q_name: str): - """Define mapping from qasm gate name to L1Q gate name.""" - self.gate_map[qasm_name] = l1q_name + def map_gate(self, qasm_name: str, labone_q_name: str): + """Define mapping from qasm gate name to LabOne Q gate name.""" + self.gate_map[qasm_name] = labone_q_name def register_gate_section(self, name: str, qubit_names: list[str], section_factory): """Register a LabOne Q section factory as a gate.""" diff --git a/laboneq/openqasm3/openqasm3_importer.py b/laboneq/openqasm3/openqasm3_importer.py index 5dcde26..486b4f6 100644 --- a/laboneq/openqasm3/openqasm3_importer.py +++ b/laboneq/openqasm3/openqasm3_importer.py @@ -14,7 +14,9 @@ from laboneq.core.exceptions import LabOneQException from laboneq.dsl.experiment import Experiment, Section from laboneq.dsl.experiment.utils import id_generator -from laboneq.dsl.quantum.qubits import Qubit, SignalType +from laboneq.dsl.quantum.quantum_element import SignalType +from laboneq.dsl.quantum.qubit import Qubit +from laboneq.dsl.quantum.transmon import Transmon from laboneq.openqasm3.expression import eval_expression, eval_lvalue from laboneq.openqasm3.gate_store import GateStore from laboneq.openqasm3.namespace import ClassicalRef, NamespaceNest, QubitRef @@ -81,7 +83,7 @@ class OpenQasm3Importer: def __init__( self, gate_store: GateStore, - qubits: dict[str, Qubit] = None, + qubits: dict[str, Qubit, Transmon] = None, ): self.gate_store = gate_store self.dsl_qubits = qubits diff --git a/laboneq/simple.py 
b/laboneq/simple.py index 1d87fa7..aeafb94 100644 --- a/laboneq/simple.py +++ b/laboneq/simple.py @@ -47,11 +47,17 @@ Sweep, pulse_library, ) -from laboneq.dsl.quantum import QuantumOperation, Qubit, QubitParameters +from laboneq.dsl.quantum import ( + QuantumOperation, + Qubit, + QubitParameters, + Transmon, + TransmonParameters, +) from laboneq.dsl.result import Results from laboneq.dsl.session import Session from laboneq.dsl.utils import has_onboard_lo -from laboneq.implementation.data_storage.l1q_database_wrapper import L1QDatabase +from laboneq.implementation.data_storage.laboneq_database import DataStore from laboneq.openqasm3.openqasm3_importer import exp_from_qasm from laboneq.pulse_sheet_viewer.pulse_sheet_viewer import show_pulse_sheet from laboneq.simulator.output_simulator import OutputSimulator diff --git a/requirements-dev.txt b/requirements-dev.txt index 23f4936..8668c42 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -9,6 +9,7 @@ pytest-cov pytest_mock pytest-httpserver pytest-xdist +pytest-benchmark[aspect,elasticsearch] @ git+https://github.com/ckutlu/pytest-benchmark.git@master-ckutlu jsonschema requests nbconvert @@ -17,10 +18,16 @@ flaky==3.7.0 pyvisa-py==0.5.3 qcodes qiskit +qiskit_experiments +qiskit_qasm3_import +pygsti ipykernel rustworkx ipython +# Packaging test +pydeps + # Formatting (must match version in CI) black[jupyter]==22.10 pre-commit