diff --git a/conftest.py b/conftest.py index 009d37a9..46bcc2db 100644 --- a/conftest.py +++ b/conftest.py @@ -21,9 +21,7 @@ from recirq.qcqmc.hamiltonian import ( HamiltonianData, HamiltonianFileParams, - build_hamiltonian_from_file, ) -from recirq.qcqmc.optimize_wf import build_pp_plus_trial_wavefunction from recirq.qcqmc.trial_wf import ( PerfectPairingPlusTrialWavefunctionParams, TrialWavefunctionData, @@ -45,7 +43,7 @@ def fixture_4_qubit_ham(package_tmp_path) -> HamiltonianData: path_prefix=str(package_tmp_path), ) - hamiltonian_data = build_hamiltonian_from_file(params) + hamiltonian_data = HamiltonianData.build_hamiltonian_from_file(params) return hamiltonian_data @@ -60,7 +58,7 @@ def fixture_8_qubit_ham(package_tmp_path) -> HamiltonianData: path_prefix=str(package_tmp_path), ) - hamiltonian_data = build_hamiltonian_from_file(params) + hamiltonian_data = HamiltonianData.build_hamiltonian_from_file(params) return hamiltonian_data @@ -76,7 +74,7 @@ def fixture_12_qubit_ham(package_tmp_path) -> HamiltonianData: path_prefix=str(package_tmp_path), ) - hamiltonian_data = build_hamiltonian_from_file(params) + hamiltonian_data = HamiltonianData.build_hamiltonian_from_file(params) return hamiltonian_data @@ -94,7 +92,7 @@ def fixture_4_qubit_ham_and_trial_wf( path_prefix=fixture_4_qubit_ham.params.path_prefix, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_4_qubit_ham.params: fixture_4_qubit_ham} ) @@ -115,7 +113,7 @@ def fixture_8_qubit_ham_and_trial_wf( path_prefix=fixture_8_qubit_ham.params.path_prefix, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_8_qubit_ham.params: fixture_8_qubit_ham} ) diff --git a/dev_tools/check_notebooks.py b/dev_tools/check_notebooks.py index c9ae7f4d..6c4af78a 100644 --- a/dev_tools/check_notebooks.py +++ b/dev_tools/check_notebooks.py @@ -91,6 +91,9 @@ def check_notebook(notebook_fn: str): 'docs/hfvqe/quickstart.ipynb', 'docs/fermi_hubbard/publication_results.ipynb', 'docs/fermi_hubbard/experiment_example.ipynb', + 'docs/qcqmc/high_level.ipynb', + 'docs/qcqmc/full_workflow.ipynb', + 'docs/qcqmc/experimental_wavefunctions.ipynb', ] diff --git a/docs/qcqmc/experimental_wavefunctions.ipynb b/docs/qcqmc/experimental_wavefunctions.ipynb new file mode 100644 index 00000000..a1a5bf6a --- /dev/null +++ b/docs/qcqmc/experimental_wavefunctions.ipynb @@ -0,0 +1,221 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Analyzing the Experimentally Reconstructed Wavefunctions\n", + "\n", + "The experimentally reconstructe wavefunctions from the \n", + "[QCQMC](https://www.nature.com/articles/s41586-021-04351-z) paper are available\n", + "for download from [zenodo](https://zenodo.org/records/10141262)." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import urllib.request\n",
+    "import tarfile\n",
+    "\n",
+    "stream = urllib.request.urlopen(\"https://zenodo.org/records/10141262/files/qcqmc_data.tar.gz\")\n",
+    "\n",
+    "with tarfile.open(fileobj=stream, mode='r|gz') as tf:\n",
+    "    tf.extractall()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Look at the README from the downloaded data\n",
+    "from IPython.display import Markdown, display\n",
+    "display(Markdown(filename='qcqmc_data/README.md'))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Loading the wavefunctions\n",
+    "\n",
+    "Here we will reproduce some of the results from Section E of the SI from [QCQMC](https://www.nature.com/articles/s41586-021-04351-z).\n",
+    "\n",
+    "The variational energy of the trial wavefunction as a function of the number of cliffords used for shadow tomography is reported in Table S1, which we have reproduced here:\n",
+    "\n",
+    "| NCliffords | repeat 1 | repeat 2 | repeat 3 | repeat 4 |\n",
+    "|---|---|---|---|---|\n",
+    "| 10 | -1.800644 | -1.764747 | -1.813274 | -1.658202 |\n",
+    "| 16 | -1.823041 | -1.802192 | -1.840494 | -1.730591 |\n",
+    "| 28 | -1.906644 | -1.839835 | -1.843326 | -1.746749 |\n",
+    "| 47 | -1.925654 | -1.888527 | -1.860863 | -1.809656 |\n",
+    "| 80 | -1.909567 | -1.869456 | -1.887139 | -1.846339 |\n",
+    "| 136 | -1.930880 | -1.902309 | -1.889992 | -1.879164 |\n",
+    "| 229 | -1.944249 | -1.921523 | -1.903710 | -1.890947 |\n",
+    "| 387 | -1.947362 | -1.934682 | -1.910477 | -1.901883 |\n",
+    "| 652 | -1.952416 | -1.939853 | -1.912790 | -1.905250 |\n",
+    "| 1100 | -1.955544 | -1.944651 | -1.915073 | -1.909122 |\n",
+    "| 1856 | -1.955028 | -1.945966 | -1.909558 | -1.908038 |\n",
+    "| 3129 | -1.953877 | -1.947763 | -1.913386 | -1.908835 |\n",
+    "| 5276 | -1.954697 | -1.947323 | -1.912284 | -1.909315 |\n",
+    "| 8896 | -1.954930 | -1.947458 | -1.913889 | -1.913068 |\n",
+    "| 15000 | -1.954356 | -1.948894 | -1.913894 | -1.913082 |\n",
+    "\n",
+    "Each column represents an independent partitioned shadow tomography experiment.\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We can reproduce this table from the data by using [FQE](https://quantumai.google/openfermion/fqe) to read the wavefunctions and Hamiltonian and recompute the variational energy of the shadow trial wavefunction.\n",
+    "\n",
+    "First we need to parse the wavefunction and Hamiltonian before computing the energy using FQE:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import fqe\n",
+    "import h5py\n",
+    "import numpy as np\n",
+    "\n",
+    "def compute_energy(wfn_file: str, ham_file: str) -> float:\n",
+    "    \"\"\"Compute the variational energy of the experimentally reconstructed wavefunction.\n",
+    "\n",
+    "    Args:\n",
+    "        wfn_file: The path to the FQE wavefunction.\n",
+    "        ham_file: The path to the hamiltonian.\n",
+    "\n",
+    "    Returns:\n",
+    "        The variational energy\n",
+    "    \"\"\"\n",
+    "    fqe_wfn = fqe.wavefunction.Wavefunction()\n",
+    "    fqe_wfn.read(filename=wfn_file)\n",
+    "    with h5py.File(ham_file, 'r') as fh5:\n",
+    "        ecore = fh5[\"e0\"][()]\n",
+    "        h1e_act = fh5[\"h1\"][:]\n",
+    "        eri_act = fh5[\"h2\"][:]\n",
+    "    # get integrals into openfermion order\n",
+    "    of_eris = np.transpose(eri_act, (0, 2, 3, 1))\n",
+    "    # ... and then into FQE format\n",
+    "    fqe_ham = fqe.hamiltonians.restricted_hamiltonian.RestrictedHamiltonian((h1e_act, np.einsum('ijlk', -0.5 * of_eris)), e_0=ecore)\n",
+    "    return fqe_wfn.expectationValue(fqe_ham)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Now we can glob all of the wavefunctions from the zenodo repo and recompute the variational energy:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import glob\n",
+    "import pandas as pd\n",
+    "for column in [1, 2, 3, 4]:\n",
+    "    wavefunctions = glob.glob(f'qcqmc_data/h4_sto3g/fqe_wfns/column{column}/wfn*')\n",
+    "    n_cliffords = [int(x.split('/')[-1].split('_')[-1]) for x in wavefunctions]\n",
+    "    energies = [compute_energy(wfn_file, f'qcqmc_data/h4_sto3g/fqe_hams/column{column}/chem_ham.h5').real for wfn_file in wavefunctions]\n",
+    "    if column == 1:\n",
+    "        df = pd.DataFrame({'n_clifford': n_cliffords, f'repeat {column}': energies})\n",
+    "    else:\n",
+    "        new_df = pd.DataFrame({'n_clifford': n_cliffords, f'repeat {column}': energies})\n",
+    "        df = pd.merge(df, new_df, on='n_clifford', how='outer')\n",
+    "\n",
+    "reproduced_table = df.sort_values(by='n_clifford')\n",
+    "print(reproduced_table)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Given these inputs we can also re-run AFQMC using the shadow wavefunctions as a trial wavefunction. To do so, we first need to parse the wavefunction and Hamiltonian, which are provided in ipie format in the zenodo repo:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from ipie.qmc.afqmc import AFQMC\n",
+    "\n",
+    "num_elec = (2, 2)  # H4 molecule, 4 electrons, ms = 0\n",
+    "n_clifford = 15_000\n",
+    "ham_file = 'qcqmc_data/h4_sto3g/ipie_ham/column3/ham.h5'\n",
+    "wfn_file = f'qcqmc_data/h4_sto3g/ipie_wfns/column3/wfn_{n_clifford}.h5'\n",
+    "afqmc = AFQMC.build_from_hdf5(num_elec, ham_file, wfn_file, num_blocks=1_000, num_walkers=100)\n",
+    "afqmc.trial.calculate_energy(afqmc.system, afqmc.hamiltonian)\n",
+    "afqmc.run(estimator_filename=f'h4_sto3g_n_clifford_{n_clifford}.h5')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Finally, we can analyze the result and compare it to the literature value; the two agree within error bars."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from ipie.analysis.extraction import extract_observable\n",
+    "import matplotlib.pyplot as plt\n",
+    "from ipie.analysis.blocking import reblock_minimal\n",
+    "\n",
+    "paper_result = -1.96922\n",
+    "analyzed_result = reblock_minimal(afqmc.estimators.filename, start_block=100)\n",
+    "notebook_energy = analyzed_result.ETotal_ac.values[0]\n",
+    "notebook_error = analyzed_result.ETotal_error_ac.values[0]\n",
+    "\n",
+    "data = extract_observable(afqmc.estimators.filename)\n",
+    "plt.plot(data.ETotal, marker='o', label=\"AFQMC\", color=\"C0\")\n",
+    "plt.axhline(paper_result, label=\"Exact Result\", color=\"C1\")\n",
+    "plt.axhline(notebook_energy, label=\"Notebook result\", color=\"C2\")\n",
+    "plt.legend()\n",
+    "plt.xlabel(\"Block number\")\n",
+    "plt.ylabel(\"Total Energy (Ha)\")\n",
+    "print(f\"paper result = {paper_result}\")\n",
+    "print(f\"notebook result = {notebook_energy} +/- {notebook_error}\")"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.12.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/docs/qcqmc/full_workflow.ipynb b/docs/qcqmc/full_workflow.ipynb
new file mode 100644
index 00000000..5a6410b6
--- /dev/null
+++ b/docs/qcqmc/full_workflow.ipynb
@@ -0,0 +1,384 @@
+{
+ "cells": [
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# End-to-End Example\n",
+    "\n",
+    "We learned in the [high-level](high_level.ipynb) notebook how to set up the quantum side of the calculation, which involves specifying a Hamiltonian and defining a trial wavefunction ansatz before performing shadow tomography to extract the AFQMC trial wavefunction. In this example, we will repeat these steps for the case of H4, which was studied in the [paper](https://arxiv.org/pdf/2106.16235.pdf). Then we will outline how to interface the output of the quantum half of the calculation with [ipie](https://github.com/JoonhoLee-Group/ipie/) to perform AFQMC with the quantum trial wavefunction."
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Define an SCF Job\n",
+    "\n",
+    "We first set up a `PyscfHamiltonianParams` object which defines the SCF job we will run using [pyscf](https://github.com/pyscf/pyscf). Here we are simulating H4 in the (minimal) sto-3g basis."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from recirq.qcqmc.hamiltonian import PyscfHamiltonianParams, HamiltonianData\n",
+    "\n",
+    "pyscf_params = PyscfHamiltonianParams(\n",
+    "    name=\"TEST_square_H4\",\n",
+    "    n_orb=4,\n",
+    "    n_elec=4,\n",
+    "    geometry=((\"H\", (0, 0, 0)), (\"H\", (0, 0, 1.23)), (\"H\", (1.23, 0, 0)), (\"H\", (1.23, 0, 1.23))),\n",
+    "    basis=\"sto3g\",\n",
+    "    multiplicity=1,\n",
+    "    charge=0,\n",
+    "    save_chkfile=True,\n",
+    "    overwrite_chk_file=True,\n",
+    ")\n",
+    "pyscf_hamiltonian = HamiltonianData.build_hamiltonian_from_pyscf(pyscf_params)\n",
+    "chk_path = pyscf_params.base_path.with_suffix(\".chk\")"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Build Perfect Pairing Wavefunction\n",
+    "\n",
+    "Next we build a perfect pairing wavefunction based upon our `pyscf_params`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import numpy as np\n",
+    "from recirq.qcqmc.trial_wf import (\n",
+    "    PerfectPairingPlusTrialWavefunctionParams,\n",
+    "    TrialWavefunctionData\n",
+    ")\n",
+    "\n",
+    "pp_params = PerfectPairingPlusTrialWavefunctionParams(\n",
+    "    'pp_plus_test',\n",
+    "    hamiltonian_params=pyscf_params,\n",
+    "    heuristic_layers=tuple(),\n",
+    "    do_pp=True,\n",
+    "    restricted=False,\n",
+    "    initial_orbital_rotation=None,\n",
+    "    initial_two_body_qchem_amplitudes=np.asarray([0.3, 0.4]),\n",
+    "    do_optimization=True,\n",
+    "    use_fast_gradients=True\n",
+    ")\n",
+    "\n",
+    "trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies(\n",
+    "    pp_params, dependencies={pyscf_params: pyscf_hamiltonian}, do_print=True\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We can compare the energy of the optimized ansatz to the exact ground state energy:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(f\"Trial wavefunction energy: {trial_wf.ansatz_energy}\")\n",
+    "print(f\"Exact ground state energy: {trial_wf.fci_energy}\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Next, we build an `experiment` which will combine the trial wavefunction circuit with the circuits required for shadow tomography. In this case the experiment will be simulated using a statevector simulator."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from recirq.qcqmc.blueprint import BlueprintParamsTrialWf, BlueprintData\n", + "from recirq.qcqmc.experiment import SimulatedExperimentParams, ExperimentData\n", + "\n", + "blueprint_params = BlueprintParamsTrialWf(\n", + " name=\"blueprint_test_medium\",\n", + " trial_wf_params=pp_params,\n", + " n_cliffords=100,\n", + " qubit_partition=(tuple(qubit for qubit in pp_params.qubits_jordan_wigner_ordered),),\n", + " seed=1,\n", + ")\n", + "\n", + "blueprint = BlueprintData.build_blueprint_from_dependencies(blueprint_params, dependencies={pp_params: trial_wf})\n", + "\n", + "simulated_experiment_params = SimulatedExperimentParams(\n", + " name=\"test_1\",\n", + " blueprint_params=blueprint.params,\n", + " noise_model_name=\"None\",\n", + " noise_model_params=(0,),\n", + " n_samples_per_clifford=31,\n", + " seed=1,\n", + ")\n", + "experiment = ExperimentData.build_experiment_from_dependencies(\n", + " params=simulated_experiment_params, dependencies={blueprint.params: blueprint}\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The experimental output is post-processed to extract the reconstructed trial wavefunction:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from typing import Dict\n", + "from recirq.qcqmc.data import Params, Data\n", + "from recirq.qcqmc.analysis import OverlapAnalysisData, OverlapAnalysisParams\n", + "\n", + "analysis_params = OverlapAnalysisParams(\n", + " \"TEST_analysis\", experiment_params=experiment.params, k_to_calculate=(1,)\n", + ")\n", + "all_dependencies: Dict[Params, Data] = {\n", + " pyscf_params: pyscf_hamiltonian,\n", + " pp_params: trial_wf,\n", + " blueprint_params: blueprint,\n", + " simulated_experiment_params: experiment,\n", + "}\n", + "analysis = OverlapAnalysisData.build_analysis_from_dependencies(\n", + " analysis_params, dependencies=all_dependencies\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Finally, we save the wavefunction in a format that is acceptable for ipie." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from recirq.qcqmc.convert_to_ipie import save_wavefunction_for_ipie\n", + "ipie_data = save_wavefunction_for_ipie(\n", + " hamiltonian_data=pyscf_hamiltonian, trial_wf_data=trial_wf, overlap_analysis_data=analysis, do_print=False\n", + ")\n", + "print(f\"Reconstructed shadow wavefunction energy: {ipie_data.variational_energy}\")\n", + "print(f\"Ideal trial wavefunction energy: {trial_wf.ansatz_energy}\")\n", + "print(f\"FCI energy: {trial_wf.fci_energy}\")\n", + "print(ipie_data)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run AFQMC \n", + "\n", + "Now that we have built our quantum wavefunction, we can use it as a trial wavefunction in an AFQMC simulation. First, we need to build a factorized Hamiltonian which is required for AFQMC. 
\n",
+    "\n",
+    "In particular, we require the three-index Cholesky tensor (`ham.chol` below) which satisfies\n",
+    "\n",
+    "$$\n",
+    "(pq|rs) = \\sum_X L_{pq}^X L_{rs}^X.\n",
+    "$$"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from ipie.systems.generic import Generic\n",
+    "from ipie.utils.from_pyscf import generate_hamiltonian_from_chk\n",
+    "\n",
+    "num_elec = (pyscf_params.n_elec // 2,) * 2\n",
+    "system = Generic(num_elec)\n",
+    "chk_path = pyscf_params.base_path.with_suffix(\".chk\")\n",
+    "ham = generate_hamiltonian_from_chk(str(chk_path))\n",
+    "assert ham.H1.shape == (2, pyscf_params.n_orb, pyscf_params.n_orb)\n",
+    "assert ham.chol.shape[0] == pyscf_params.n_orb * pyscf_params.n_orb"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Next, we need to build a trial wavefunction from the quantum wavefunction. In practice, the quantum trial is expanded as a linear combination of (orthogonal) Slater determinants. Within ipie, this style of trial wavefunction is defined as a `ParticleHole` trial wavefunction."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import h5py\n",
+    "from ipie.trial_wavefunction.particle_hole import ParticleHole\n",
+    "# Read quantum wavefunction from file.\n",
+    "with h5py.File(ipie_data.path, 'r') as fh5:\n",
+    "    coeffs = fh5[\"coeffs_rotated\"][:]\n",
+    "    occa = fh5[\"occa_rotated\"][:]\n",
+    "    occb = fh5[\"occb_rotated\"][:]\n",
+    "wfn = ParticleHole((coeffs, occa, occb), num_elec, pyscf_params.n_orb)\n",
+    "wfn.half_rotate(ham)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "wfn.calculate_energy(system, ham)\n",
+    "print(f\"Trial wavefunction variational energy in ipie: {wfn.energy.real}\")\n",
+    "assert np.isclose(wfn.energy.real, ipie_data.variational_energy)"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note that the variational energy might differ slightly from the result computed in the previous section. This is because the Cholesky factorization uses a threshold of $1\\times10^{-5}$ as a stopping criterion for convergence. Reducing the parameter `chol_cut` in `generate_hamiltonian_from_chk` will yield better agreement.\n",
+    "\n",
+    "At this point, we are ready to run AFQMC. We need to build an `AFQMC` driver class which takes as input the factorized Hamiltonian and our `ParticleHole` trial wavefunction.\n",
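+    "\n",
+    "Before building the driver, we can optionally sanity-check the factorization. The snippet below is an illustrative sketch (it assumes, as the assertion above suggests, that `ham.chol` is stored as an `(n_orb**2, n_chol)` array) that contracts the Cholesky vectors back into the four-index integrals:\n",
+    "\n",
+    "```python\n",
+    "# Rebuild (pq|rs) from the Cholesky vectors and reshape to a 4-index tensor.\n",
+    "n_orb = pyscf_params.n_orb\n",
+    "eri_from_chol = (ham.chol @ ham.chol.T).reshape((n_orb,) * 4)\n",
+    "print(eri_from_chol.shape)\n",
+    "```"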
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ipie.analysis.extraction import extract_observable\n", + "import matplotlib.pyplot as plt\n", + "data = extract_observable(qmc_driver.estimators.filename)\n", + "plt.plot(data.ETotal, marker='o', label=\"AFQMC\", color=\"C0\")\n", + "plt.axhline(-1.969512, label=\"Exact Result\", color=\"C1\")\n", + "plt.legend()\n", + "plt.xlabel(\"Block number\")\n", + "plt.ylabel(\"Total Energy (Ha)\")" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Through visual inspection, we can determine when the calculation has equilibrated, after which we should be sampling the (approximate) ground state. We can use samples from this point on to estimate the AFQMC energy and error bar. The function `reblock_minimal` will perform the necessary error analysis taking into account the serial temporal correlation in the AFQMC data. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ipie.analysis.blocking import reblock_minimal \n", + "reblock_minimal(qmc_driver.estimators.filename, start_block=20)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that the number of walkers and the number of blocks is probably too low to obtain statistically significant results and we would advise increasing both of these. For larger scale simulations, we can use MPI to distribute the work among many MPI processes. See [ipie](https://github.com/JoonhoLee-Group/ipie/) for further details." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Next steps\n", + "\n", + "The results presented above are far worse than reported in the paper. In practice one needs to:\n", + "\n", + "1. Optimize the trial wavefunction parameters\n", + "2. Converge the reconstructed shadow wavefunction with respect to: `n_cliffords`\n", + "3. The AFQMC part of the simulations can be sped up using MPI parallelism. See [ipie](https://github.com/JoonhoLee-Group/ipie) for more information. The number of walkers should also be increased to obtain better statistics and reduce any population control biases. The number of samples (`num_blocks`) should also be increased (`ETotal_nsamp_ac` should be about 100 for statistically significant results.) " + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "qcqmc", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/qcqmc/high_level.ipynb b/docs/qcqmc/high_level.ipynb new file mode 100644 index 00000000..0e581a59 --- /dev/null +++ b/docs/qcqmc/high_level.ipynb @@ -0,0 +1,286 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "679c5fd1", + "metadata": {}, + "source": [ + "# QCQMC\n", + "\n", + "This notebook demonstrates the high-level code interface used to run the quantum part of https://arxiv.org/pdf/2106.16235.pdf.\n", + "\n", + "The code is organized into five steps, where the input of each step is specified by one or more `[Name]Params` dataclasses and the output of each step is an `attrs` dataclass named `[Name]Data` with a factory method `build_...`. 
Each of the steps is demonstrated below."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "16e16876",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import attrs\n",
+    "\n",
+    "def print_fields(x):\n",
+    "    \"\"\"Helper function to inspect returned objects' fields.\"\"\"\n",
+    "    for k, v in attrs.asdict(x).items():\n",
+    "        print(f'{k}: {type(v)}')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "00d220ee",
+   "metadata": {},
+   "source": [
+    "## Hamiltonian\n",
+    "\n",
+    "The first step is to pick a molecule and generate or load its relevant physical properties, namely the Hamiltonian. Here we specify a 2-electron Fermi-Hubbard Hamiltonian in the sto-3g [basis](https://en.wikipedia.org/wiki/STO-nG_basis_sets). Integral data is stored in the data/ directory of this repository.\n",
+    "\n",
+    "The resulting data consists of the one- and two-body integrals and some energy quantities."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f003aaa5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from recirq.qcqmc.hamiltonian import HamiltonianFileParams\n",
+    "hamiltonian_params = HamiltonianFileParams(\n",
+    "    name='4q_pp',\n",
+    "    integral_key='fh_sto3g',\n",
+    "    n_orb=2,\n",
+    "    n_elec=2,\n",
+    ")\n",
+    "hamiltonian_params"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3d0fc1f3",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from recirq.qcqmc.hamiltonian import HamiltonianData\n",
+    "hamiltonian_data = HamiltonianData.build_hamiltonian_from_file(hamiltonian_params)\n",
+    "print_fields(hamiltonian_data)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "74230882",
+   "metadata": {},
+   "source": [
+    "## Trial Wavefunction\n",
+    "\n",
+    "Next, we specify a trial wavefunction. Here we request a perfect pairing trial (using the specialized Params class) and don't include any heuristic layers (to keep the example simple and the runtime short).\n",
+    "\n",
+    "The output data includes parameterized circuits and their parameters."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "ef084640",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from recirq.qcqmc.trial_wf import PerfectPairingPlusTrialWavefunctionParams\n",
+    "trial_wf_params = PerfectPairingPlusTrialWavefunctionParams(\n",
+    "    name='4q_pp',\n",
+    "    hamiltonian_params=hamiltonian_params,\n",
+    "    heuristic_layers=(),\n",
+    ")\n",
+    "trial_wf_params"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a942ca71",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from recirq.qcqmc.trial_wf import TrialWavefunctionData\n",
+    "trial_wf_data = TrialWavefunctionData.build_pp_plus_trial_from_dependencies(\n",
+    "    trial_wf_params,\n",
+    "    dependencies={\n",
+    "        hamiltonian_params: hamiltonian_data\n",
+    "    }\n",
+    ")\n",
+    "print('--'*20)\n",
+    "print_fields(trial_wf_data)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f536a0d8",
+   "metadata": {},
+   "source": [
+    "## Blueprint\n",
+    "\n",
+    "Next, we configure the shadow tomography strategy for measuring the trial wavefunction. We specify how many cliffords to use and how to generate them, i.e. the qubit partition.\n",
+    "\n",
+    "The returned data is a compiled circuit with parameterized clifford suffixes and Cirq resolvers for efficient execution on a device.\n",
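+    "\n",
+    "The `qubit_partition` controls how the random Cliffords are generated: loosely speaking, an independent random Clifford is drawn for each group of qubits in the partition. The snippet below is an illustrative sketch (using the `qubits` defined in the next cell) of the two choices that appear in these notebooks, single-qubit groups versus one group covering the whole register:\n",
+    "\n",
+    "```python\n",
+    "qubits = trial_wf_params.qubits_linearly_connected\n",
+    "\n",
+    "# One single-qubit Clifford per qubit (the partition used in this notebook).\n",
+    "per_qubit_partition = tuple((q,) for q in qubits)\n",
+    "\n",
+    "# A single Clifford acting on the whole register (as in the full workflow notebook).\n",
+    "global_partition = (tuple(qubits),)\n",
+    "```"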
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f43b9a12", + "metadata": {}, + "outputs": [], + "source": [ + "qubits = trial_wf_params.qubits_linearly_connected" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b45591cc", + "metadata": {}, + "outputs": [], + "source": [ + "from recirq.qcqmc.blueprint import BlueprintParamsTrialWf\n", + "\n", + "blueprint_params = BlueprintParamsTrialWf(\n", + " name='4q_pp',\n", + " trial_wf_params=trial_wf_params,\n", + " n_cliffords=100,\n", + " qubit_partition=tuple((q,) for q in qubits),\n", + ")\n", + "blueprint_params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d00ea516", + "metadata": {}, + "outputs": [], + "source": [ + "from recirq.qcqmc.blueprint import BlueprintData \n", + "blueprint_data = BlueprintData.build_blueprint_from_dependencies(\n", + " blueprint_params,\n", + " dependencies={\n", + " trial_wf_params: trial_wf_data\n", + " })\n", + "print_fields(blueprint_data)" + ] + }, + { + "cell_type": "markdown", + "id": "d34b627e", + "metadata": {}, + "source": [ + "## Experiment\n", + "\n", + "Now, we're ready to execute circuits and gather samples. The experiment step has two versions: simulated or on a real device. In either case, we configure how many samples to collect and any runtime-specific parameters.\n", + "\n", + "The returned data includes the experimental samples." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "86ef4223", + "metadata": {}, + "outputs": [], + "source": [ + "from recirq.qcqmc.experiment import SimulatedExperimentParams\n", + "expt_params = SimulatedExperimentParams(\n", + " name='4q_pp',\n", + " blueprint_params=blueprint_params,\n", + " n_samples_per_clifford=1_000,\n", + " noise_model_name='None',\n", + ")\n", + "expt_params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a2e1a569", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "from recirq.qcqmc.experiment import ExperimentData \n", + "expt_data = ExperimentData.build_experiment_from_dependencies(\n", + " expt_params,\n", + " dependencies={\n", + " blueprint_params: blueprint_data\n", + "})\n", + "print_fields(expt_data)" + ] + }, + { + "cell_type": "markdown", + "id": "6d8278e5", + "metadata": {}, + "source": [ + "## Overlap Analysis\n", + "\n", + "Finally, we reconstruct the wavefunction from our experiment data.\n", + "\n", + "The returned data here are reconstructed wavefunctions suitable for exporting to iPie for classical QCQMC walker updates." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a938f2ef", + "metadata": {}, + "outputs": [], + "source": [ + "from recirq.qcqmc.analysis import OverlapAnalysisParams\n", + "analysis_params = OverlapAnalysisParams(\n", + " name='4q_pp',\n", + " experiment_params=expt_params,\n", + " k_to_calculate=(1, 2),\n", + ")\n", + "analysis_params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "db222311", + "metadata": {}, + "outputs": [], + "source": [ + "from recirq.qcqmc.analysis import OverlapAnalysisData\n", + "analysis_data = OverlapAnalysisData.build_analysis_from_dependencies(analysis_params, dependencies={\n", + " expt_params: expt_data,\n", + " blueprint_params: blueprint_data,\n", + " trial_wf_params: trial_wf_data,\n", + "})\n", + "print_fields(analysis_data)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/recirq/qcqmc/hamiltonian.py b/recirq/qcqmc/hamiltonian.py index 97f5b7e2..6fa3845c 100644 --- a/recirq/qcqmc/hamiltonian.py +++ b/recirq/qcqmc/hamiltonian.py @@ -84,9 +84,7 @@ class HamiltonianFileParams(data.Params): n_orb: The number of spatial orbitals. n_elec: The total number of electrons. do_eri_restore: Whether to restore 8-fold symmetry to the integrals. - path_prefix: An optional path string to prepend to the default - hamiltonian data directory (by default the integrals will be written / - read to ./data/hamiltonians.) + path_prefix: An optional path string to prepend to the default hamiltonian path. """ name: str @@ -101,9 +99,7 @@ def _json_dict_(self): @property def path_string(self) -> str: - return ( - self.path_prefix + config.OUTDIRS.DEFAULT_HAMILTONIAN_DIRECTORY + self.name - ) + return self.path_prefix + config.OUTDIRS.DEFAULT_HAMILTONIAN_DIRECTORY + self.name @attrs.frozen @@ -129,7 +125,6 @@ class PyscfHamiltonianParams(data.Params): will be allowed to overwrite the previously saved chk file. Otherwise, if save_chkfile is True and overwrite_chk_file is False, then we raise a FileExistsError if the chk file already exists. - path_prefix: A prefix to prepend to the path where the data will be saved (if any) """ name: str @@ -153,9 +148,7 @@ def _json_dict_(self): @property def path_string(self) -> str: """Output path string""" - return ( - self.path_prefix + config.OUTDIRS.DEFAULT_HAMILTONIAN_DIRECTORY + self.name - ) + return self.path_prefix + config.OUTDIRS.DEFAULT_HAMILTONIAN_DIRECTORY + self.name @property def chk_path(self) -> Path: @@ -229,6 +222,107 @@ def get_restricted_fqe_hamiltonian(self) -> RestrictedHamiltonian: h1e=self.one_body_integrals, h2e=self.two_body_integrals_pqrs ) + @classmethod + def build_hamiltonian_from_file( + cls, + params: HamiltonianFileParams, + ) -> 'HamiltonianData': + """Function for loading a Hamiltonian from a file. + + Args: + params: parameters defining the hamiltonian. + + Returns: + properly constructed HamiltonianFileParams object. 
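+
+        Example (an illustrative sketch; the parameter values mirror those
+        used in the tests in this change):
+
+            >>> params = HamiltonianFileParams(
+            ...     name="example", integral_key="fh_sto3g", n_orb=2, n_elec=2
+            ... )
+            >>> hamiltonian_data = HamiltonianData.build_hamiltonian_from_file(params)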
+ """ + filepath = data.get_integrals_path(name=params.integral_key) + e_core, one_body_integrals, two_body_integrals_psqr, e_fci = load_integrals( + filepath=filepath + ) + + n_orb = params.n_orb + + if params.do_eri_restore: + two_body_integrals_psqr = ao2mo.restore(1, two_body_integrals_psqr, n_orb) + # This step may be necessary depending on the format in which the integrals were stored. + + # We have to reshape to a four index tensor and reorder the indices from + # chemist's ordering to physicist's. + two_body_integrals_psqr = two_body_integrals_psqr.reshape( + (n_orb, n_orb, n_orb, n_orb) + ) + two_body_integrals_pqrs = np.einsum("psqr->pqrs", two_body_integrals_psqr) + + fqe_ham = integrals_to_fqe_restricted( + h1e=one_body_integrals, h2e=two_body_integrals_pqrs + ) + + initial_wf = fqe.Wavefunction([[params.n_elec, 0, params.n_orb]]) + initial_wf.set_wfn(strategy="hartree-fock") + e_hf = _cast_to_float(initial_wf.expectationValue(fqe_ham) + e_core) + + return cls( + params=params, + e_core=e_core, + one_body_integrals=one_body_integrals, + two_body_integrals_pqrs=two_body_integrals_pqrs, + e_hf=e_hf, + e_fci=e_fci, + ) + + @classmethod + def build_hamiltonian_from_pyscf( + cls, params: PyscfHamiltonianParams + ) -> 'HamiltonianData': + """Construct a HamiltonianData object from pyscf molecule. + + Runs a RHF or ROHF simulation, performs an integral transformation and computes the FCI energy. + + Args: + params: A PyscfHamiltonianParams object specifying the molecule. pyscf_molecule is required. + + Returns: + A HamiltonianData object. + """ + molecule = params.pyscf_molecule + + if params.rhf: + pyscf_scf = scf.RHF(molecule) + else: + pyscf_scf = scf.ROHF(molecule) + + pyscf_scf.verbose = params.verbose_scf + if params.save_chkfile: + if not params.overwrite_chk_file: + if params.chk_path.exists(): + raise FileExistsError( + f"A chk file already exists at {params.chk_path}" + ) + pyscf_scf.chkfile = str(params.chk_path.resolve()) + pyscf_scf = pyscf_scf.newton() + pyscf_scf.run() + + e_hf = float(pyscf_scf.e_tot) + + one_body_integrals, two_body_integrals = compute_integrals( + pyscf_molecule=molecule, pyscf_scf=pyscf_scf + ) + + e_core = float(molecule.energy_nuc()) + + pyscf_fci = fci.FCI(molecule, pyscf_scf.mo_coeff) + pyscf_fci.verbose = 0 + e_fci = pyscf_fci.kernel()[0] + + return cls( + params=params, + e_core=e_core, + one_body_integrals=one_body_integrals, + two_body_integrals_pqrs=two_body_integrals, + e_hf=e_hf, + e_fci=e_fci, + ) + def load_integrals( *, filepath: Union[str, Path] @@ -303,99 +397,3 @@ def spinorb_from_spatial( def _cast_to_float(x: np.complex_, tol=1e-8) -> float: assert np.abs(x.imag) < tol, "Large imaginary component found." return float(x.real) - - -def build_hamiltonian_from_file( - params: HamiltonianFileParams, -) -> HamiltonianData: - """Function for loading a Hamiltonian from a file. - - Args: - params: parameters defining the hamiltonian. - - Returns: - properly constructed HamiltonianFileParams object. - """ - filepath = data.get_integrals_path(name=params.integral_key) - e_core, one_body_integrals, two_body_integrals_psqr, e_fci = load_integrals( - filepath=filepath - ) - - n_orb = params.n_orb - - if params.do_eri_restore: - two_body_integrals_psqr = ao2mo.restore(1, two_body_integrals_psqr, n_orb) - # This step may be necessary depending on the format in which the integrals were stored. - - # We have to reshape to a four index tensor and reorder the indices from - # chemist's ordering to physicist's. 
- two_body_integrals_psqr = two_body_integrals_psqr.reshape( - (n_orb, n_orb, n_orb, n_orb) - ) - two_body_integrals_pqrs = np.einsum("psqr->pqrs", two_body_integrals_psqr) - - fqe_ham = integrals_to_fqe_restricted( - h1e=one_body_integrals, h2e=two_body_integrals_pqrs - ) - - initial_wf = fqe.Wavefunction([[params.n_elec, 0, params.n_orb]]) - initial_wf.set_wfn(strategy="hartree-fock") - e_hf = _cast_to_float(initial_wf.expectationValue(fqe_ham) + e_core) - - return HamiltonianData( - params=params, - e_core=e_core, - one_body_integrals=one_body_integrals, - two_body_integrals_pqrs=two_body_integrals_pqrs, - e_hf=e_hf, - e_fci=e_fci, - ) - - -def build_hamiltonian_from_pyscf(params: PyscfHamiltonianParams) -> HamiltonianData: - """Construct a HamiltonianData object from pyscf molecule. - - Runs a RHF or ROHF simulation, performs an integral transformation and computes the FCI energy. - - Args: - params: A PyscfHamiltonianParams object specifying the molecule. pyscf_molecule is required. - - Returns: - A HamiltonianData object. - """ - molecule = params.pyscf_molecule - - if params.rhf: - pyscf_scf = scf.RHF(molecule) - else: - pyscf_scf = scf.ROHF(molecule) - - pyscf_scf.verbose = params.verbose_scf - if params.save_chkfile: - if not params.overwrite_chk_file: - if params.chk_path.exists(): - raise FileExistsError(f"A chk file already exists at {params.chk_path}") - pyscf_scf.chkfile = str(params.chk_path.resolve()) - pyscf_scf = pyscf_scf.newton() - pyscf_scf.run() - - e_hf = float(pyscf_scf.e_tot) - - one_body_integrals, two_body_integrals = compute_integrals( - pyscf_molecule=molecule, pyscf_scf=pyscf_scf - ) - - e_core = float(molecule.energy_nuc()) - - pyscf_fci = fci.FCI(molecule, pyscf_scf.mo_coeff) - pyscf_fci.verbose = 0 - e_fci = pyscf_fci.kernel()[0] - - return HamiltonianData( - params=params, - e_core=e_core, - one_body_integrals=one_body_integrals, - two_body_integrals_pqrs=two_body_integrals, - e_hf=e_hf, - e_fci=e_fci, - ) diff --git a/recirq/qcqmc/hamiltonian_test.py b/recirq/qcqmc/hamiltonian_test.py index 9c474282..e703cad9 100644 --- a/recirq/qcqmc/hamiltonian_test.py +++ b/recirq/qcqmc/hamiltonian_test.py @@ -14,13 +14,17 @@ import cirq import numpy as np import pytest -from openfermion import (get_fermion_operator, get_ground_state, - get_number_preserving_sparse_operator) +from openfermion import ( + get_fermion_operator, + get_ground_state, + get_number_preserving_sparse_operator, +) -from recirq.qcqmc.hamiltonian import (HamiltonianFileParams, - PyscfHamiltonianParams, - build_hamiltonian_from_file, - build_hamiltonian_from_pyscf) +from recirq.qcqmc.hamiltonian import ( + HamiltonianData, + HamiltonianFileParams, + PyscfHamiltonianParams, +) def test_load_from_file_hamiltonian_runs(): @@ -28,7 +32,7 @@ def test_load_from_file_hamiltonian_runs(): name="test hamiltonian", integral_key="fh_sto3g", n_orb=2, n_elec=2 ) - hamiltonian_data = build_hamiltonian_from_file(params) + hamiltonian_data = HamiltonianData.build_hamiltonian_from_file(params) assert hamiltonian_data.one_body_integrals.shape == (2, 2) assert hamiltonian_data.two_body_integrals_pqrs.shape == (2, 2, 2, 2) @@ -62,7 +66,7 @@ def test_hamiltonian_energy_consistent( do_eri_restore=do_eri_restore, ) - hamiltonian_data = build_hamiltonian_from_file(params) + hamiltonian_data = HamiltonianData.build_hamiltonian_from_file(params) molecular_hamiltonian = hamiltonian_data.get_molecular_hamiltonian() @@ -90,7 +94,7 @@ def test_pyscf_h4_consistent_with_file(): charge=0, ) - pyscf_hamiltonian = 
build_hamiltonian_from_pyscf(pyscf_params) + pyscf_hamiltonian = HamiltonianData.build_hamiltonian_from_pyscf(pyscf_params) from_file_params = HamiltonianFileParams( name="test hamiltonian", @@ -100,7 +104,9 @@ def test_pyscf_h4_consistent_with_file(): do_eri_restore=False, ) - from_file_hamiltonian = build_hamiltonian_from_file(from_file_params) + from_file_hamiltonian = HamiltonianData.build_hamiltonian_from_file( + from_file_params + ) np.testing.assert_almost_equal( pyscf_hamiltonian.e_core, from_file_hamiltonian.e_core, decimal=10 @@ -134,11 +140,11 @@ def test_pyscf_saves_chk_without_overwrite(tmp_path): chk_path = pyscf_params.base_path.with_suffix(".chk") chk_path.unlink(missing_ok=True) - build_hamiltonian_from_pyscf(pyscf_params) + HamiltonianData.build_hamiltonian_from_pyscf(pyscf_params) assert chk_path.exists() with pytest.raises(FileExistsError): - pyscf_hamiltonian = build_hamiltonian_from_pyscf(pyscf_params) + pyscf_hamiltonian = HamiltonianData.build_hamiltonian_from_pyscf(pyscf_params) chk_path.unlink() @@ -165,7 +171,7 @@ def test_pyscf_saves_chk_with_overwrite(tmp_path): chk_path = pyscf_params.base_path.with_suffix(".chk") chk_path.unlink(missing_ok=True) - build_hamiltonian_from_pyscf(pyscf_params) + HamiltonianData.build_hamiltonian_from_pyscf(pyscf_params) assert chk_path.exists() diff --git a/recirq/qcqmc/optimize_wf_test.py b/recirq/qcqmc/optimize_wf_test.py index fba1a78e..23419b38 100644 --- a/recirq/qcqmc/optimize_wf_test.py +++ b/recirq/qcqmc/optimize_wf_test.py @@ -22,14 +22,16 @@ from recirq.qcqmc.hamiltonian import HamiltonianData from recirq.qcqmc.layer_spec import LayerSpec from recirq.qcqmc.optimize_wf import ( - build_pp_plus_trial_wavefunction, compute_finite_difference_grad, evaluate_energy_and_gradient, get_ansatz_qubit_wf, get_evolved_wf, get_two_body_params_from_qchem_amplitudes, ) -from recirq.qcqmc.trial_wf import PerfectPairingPlusTrialWavefunctionParams +from recirq.qcqmc.trial_wf import ( + PerfectPairingPlusTrialWavefunctionParams, + TrialWavefunctionData, +) def test_pp_wf_energy(fixture_4_qubit_ham: HamiltonianData): @@ -41,7 +43,7 @@ def test_pp_wf_energy(fixture_4_qubit_ham: HamiltonianData): restricted=True, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_4_qubit_ham.params: fixture_4_qubit_ham} ) @@ -57,7 +59,7 @@ def test_pp_wf_energy_with_layer(fixture_4_qubit_ham: HamiltonianData): restricted=True, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_4_qubit_ham.params: fixture_4_qubit_ham} ) @@ -82,7 +84,7 @@ def test_qchem_pp_eight_qubit_wavefunctions_consistent( do_optimization=False, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_8_qubit_ham.params: fixture_8_qubit_ham}, do_print=False, @@ -108,7 +110,7 @@ def test_pp_plus_wf_energy_sloppy_1(fixture_8_qubit_ham: HamiltonianData): random_parameter_scale=1, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_8_qubit_ham.params: fixture_8_qubit_ham}, do_print=False, @@ -129,7 +131,7 @@ def test_diamond_pp_wf_energy(fixture_12_qubit_ham: HamiltonianData): n_optimization_restarts=1, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = 
TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_12_qubit_ham.params: fixture_12_qubit_ham}, do_print=False, @@ -203,7 +205,7 @@ def test_qchem_pp_runs( do_optimization=False, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_4_qubit_ham.params: fixture_4_qubit_ham}, do_print=False, @@ -233,7 +235,7 @@ def test_qchem_conversion_negative(fixture_4_qubit_ham: HamiltonianData): do_optimization=False, ) - trial_wf = build_pp_plus_trial_wavefunction( + trial_wf = TrialWavefunctionData.build_pp_plus_trial_from_dependencies( params, dependencies={fixture_4_qubit_ham.params: fixture_4_qubit_ham}, do_print=False, diff --git a/recirq/qcqmc/trial_wf.py b/recirq/qcqmc/trial_wf.py index f96f220f..7e6a5eea 100644 --- a/recirq/qcqmc/trial_wf.py +++ b/recirq/qcqmc/trial_wf.py @@ -24,6 +24,7 @@ import scipy.sparse from recirq.qcqmc import ( + afqmc_circuits, bitstrings, config, data, @@ -212,6 +213,100 @@ def _json_dict_(self): simple_dict["params"] = self.params return simple_dict + @classmethod + def build_pp_plus_trial_from_dependencies( + cls, + params: PerfectPairingPlusTrialWavefunctionParams, + *, + dependencies: Dict[data.Params, data.Data], + do_print: bool = False, + ) -> "TrialWavefunctionData": + """Builds a TrialWavefunctionData from a TrialWavefunctionParams + + Args: + params: The parameters specifying the PP+ trial wavefunction. + dependencies: Dependencies required to construct the trial + wavefunction. In particular, a HamiltonianParams/Data key pair must + be provided. + do_print: If set to true then print debugging information to stdout + + Returns: + The constructed TrialWavefunctionData object. + """ + + # Avoid circular imports + from recirq.qcqmc import optimize_wf + + if do_print: + print("Building Trial Wavefunction") + np.random.seed(params.seed) + hamiltonian_data = dependencies[params.hamiltonian_params] + assert isinstance(hamiltonian_data, hamiltonian.HamiltonianData) + + if params.n_orb != params.n_elec: + raise ValueError("PP wavefunction must have n_orb = n_elec") + + if params.do_optimization: + ( + one_body_params, + two_body_params, + one_body_basis_change_mat, + ) = optimize_wf.get_pp_plus_params( + hamiltonian_data=hamiltonian_data, + restricted=params.restricted, + random_parameter_scale=params.random_parameter_scale, + initial_orbital_rotation=params.initial_orbital_rotation, + heuristic_layers=params.heuristic_layers, + do_pp=params.do_pp, + n_optimization_restarts=params.n_optimization_restarts, + do_print=do_print, + use_fast_gradients=params.use_fast_gradients, + ) + else: + if params.initial_two_body_qchem_amplitudes is None: + raise NotImplementedError( + "if initial_two_body_qchem_amplitudes must be set if do_optimization is False." + ) + if params.initial_orbital_rotation: + raise NotImplementedError( + "Initial oribtal rotation must be None if do_optimization is False." 
+ ) + + n_one_body_params = params.n_orb * (params.n_orb - 1) + one_body_params = np.zeros(n_one_body_params) + one_body_basis_change_mat = np.diag(np.ones(params.n_orb * 2)) + two_body_params = optimize_wf.get_two_body_params_from_qchem_amplitudes( + params.initial_two_body_qchem_amplitudes + ) + + (superposition_circuit, ansatz_circuit) = afqmc_circuits.get_circuits( + two_body_params=two_body_params, + n_orb=params.n_orb, + n_elec=params.n_elec, + heuristic_layers=params.heuristic_layers, + ) + + ansatz_energy, hf_energy = optimize_wf.get_and_check_energy( + hamiltonian_data=hamiltonian_data, + ansatz_circuit=ansatz_circuit, + params=params, + one_body_params=one_body_params, + two_body_params=two_body_params, + one_body_basis_change_mat=one_body_basis_change_mat, + ) + + return cls( + params=params, + ansatz_circuit=ansatz_circuit, + superposition_circuit=superposition_circuit, + hf_energy=hf_energy, + ansatz_energy=ansatz_energy, + fci_energy=hamiltonian_data.e_fci, + one_body_basis_change_mat=one_body_basis_change_mat, + one_body_params=one_body_params, + two_body_params=two_body_params, + ) + def get_rotated_hamiltonians( *,