From fcbe4ef8b0d4c1339b2b40f6883e9cfddd3b5622 Mon Sep 17 00:00:00 2001
From: Jim Garrison
Date: Tue, 26 Nov 2024 20:44:02 -0800
Subject: [PATCH] Implement unitary AQC

---
 qiskit_addon_aqc_tensor/objective.py          | 38 ++++++++++++++++++++
 .../simulation/quimb/__init__.py              | 38 ++++++++++++++++++++-
 2 files changed, 75 insertions(+), 1 deletion(-)
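Not part of the patch itself: the sketch below shows one plausible way to drive the new
MaximizeProcessFidelity objective end to end. It assumes the workflow used with
OneMinusFidelity (scipy's minimize consuming objective.loss_function with jac=True, and a
QuimbSimulator settings object) carries over unchanged to this objective, that a plain
quimb.tensor.Circuit is an acceptable circuit factory here (an MPS-based factory would not
expose the full unitary), and that the target may be supplied directly as a quimb circuit.
All circuit and variable choices are placeholders.

    import numpy as np
    import quimb.tensor as qtn
    from qiskit.circuit.library import EfficientSU2
    from scipy.optimize import minimize

    from qiskit_addon_aqc_tensor.objective import MaximizeProcessFidelity
    from qiskit_addon_aqc_tensor.simulation.quimb import QuimbSimulator

    # Target unitary, given here as a small quimb circuit (placeholder example).
    target = qtn.Circuit(2)
    target.apply_gate("H", 0)
    target.apply_gate("CNOT", 0, 1)

    # Parametrized Qiskit ansatz whose unitary should approximate the target.
    ansatz = EfficientSU2(2, reps=2)
    rng = np.random.default_rng(0)
    x0 = 0.1 * rng.standard_normal(ansatz.num_parameters)

    # Assumption: a plain quimb Circuit factory with an autodiff backend is accepted here.
    settings = QuimbSimulator(qtn.Circuit, autodiff_backend="jax")

    objective = MaximizeProcessFidelity(target, ansatz, settings)
    result = minimize(objective.loss_function, x0, jac=True, method="L-BFGS-B")
    print("final loss:", result.fun)

Because loss_function returns both the objective value and its gradient, any gradient-based
scipy optimizer can consume it directly with jac=True.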
diff --git a/qiskit_addon_aqc_tensor/objective.py b/qiskit_addon_aqc_tensor/objective.py
index c7df421..3c3c75f 100644
--- a/qiskit_addon_aqc_tensor/objective.py
+++ b/qiskit_addon_aqc_tensor/objective.py
@@ -85,7 +85,45 @@ def target(self) -> TensorNetworkState:
         return self._target_tensornetwork
 
 
+class MaximizeProcessFidelity:
+    """Maximize process fidelity."""
+
+    def __init__(self, target, ansatz: QuantumCircuit, settings: TensorNetworkSimulationSettings):
+        """Initialize the objective function.
+
+        Args:
+            target: Target unitary in tensor-network representation.
+            ansatz: Parametrized ansatz circuit.
+            settings: Tensor network simulation settings.
+        """
+        if ansatz is not None:
+            from .ansatz_generation import AnsatzBlock
+
+            ansatz = ansatz.decompose(AnsatzBlock)
+        self._ansatz = ansatz
+        self._simulation_settings = settings
+        self._target_tensornetwork = target
+        if settings is not None:
+            from .simulation.abstract import _preprocess_for_gradient
+
+            self._preprocessed = _preprocess_for_gradient(self, settings)
+
+    def loss_function(self, x: np.ndarray) -> tuple[float, np.ndarray]:
+        """Evaluate ``(objective_value, gradient)`` of the function at point ``x``."""
+        from .simulation.abstract import _compute_objective_and_gradient
+
+        return _compute_objective_and_gradient(
+            self, self._simulation_settings, self._preprocessed, x
+        )
+
+    @property
+    def target(self) -> TensorNetworkState:
+        """Target tensor network."""
+        return self._target_tensornetwork
+
+
 # Reminder: update the RST file in docs/apidocs when adding new interfaces.
 __all__ = [
     "OneMinusFidelity",
+    "MaximizeProcessFidelity",
 ]

diff --git a/qiskit_addon_aqc_tensor/simulation/quimb/__init__.py b/qiskit_addon_aqc_tensor/simulation/quimb/__init__.py
index f95afcd..4546a56 100644
--- a/qiskit_addon_aqc_tensor/simulation/quimb/__init__.py
+++ b/qiskit_addon_aqc_tensor/simulation/quimb/__init__.py
@@ -12,6 +12,7 @@
 
 """Quimb as a tensor network backend."""
 
+# ruff: noqa: F811
 from __future__ import annotations
 
 import logging
@@ -27,7 +28,7 @@
 from wrapt import register_post_import_hook
 
 from ...ansatz_generation import AnsatzBlock
-from ...objective import OneMinusFidelity
+from ...objective import MaximizeProcessFidelity, OneMinusFidelity
 from ..abstract import TensorNetworkSimulationSettings
 from ..explicit_gradient import (
     compute_gradient_of_tensornetwork_overlap,
@@ -405,6 +406,41 @@ def oneminusfidelity_loss_fn(
     return 1 - fidelity
 
 
+@dispatch
+def tnoptimizer_objective_kwargs(objective: MaximizeProcessFidelity, /) -> dict[str, Any]:
+    """Return keyword arguments for use with :func:`~quimb.tensor.TNOptimizer`.
+
+    - ``loss_fn``
+    - ``loss_kwargs``
+    """
+    import quimb.tensor as qtn
+
+    target = objective.target
+    if isinstance(target, qtn.Circuit):
+        # Compare unitaries, not output states, for process fidelity.
+        target = target.uni
+    return {
+        "loss_fn": maximizeprocessfidelity_loss_fn,
+        "loss_kwargs": {"target": target},
+    }
+
+
+def maximizeprocessfidelity_loss_fn(
+    circ: quimb.tensor.Circuit,
+    /,
+    *,
+    target: quimb.tensor.TensorNetworkGenOperator,
+    optimize="auto-hq",
+):
+    """Return ``1 - |Tr(V† U)| / 2**n`` for ansatz unitary ``V`` and target unitary ``U``."""
+    import autoray as ar
+
+    return (
+        1
+        - ar.do("abs", (circ.uni.H & target).contract(all, optimize=optimize)) / 2.0**target.nsites
+    )
+
+
 # Reminder: update the RST file in docs/apidocs when adding new interfaces.
 __all__ = [
     "is_quimb_available",