
Implement selection of optimizer in the QGAN algorithm #1253

Merged · 20 commits · Oct 15, 2020
1 change: 1 addition & 0 deletions qiskit/aqua/algorithms/distribution_learners/qgan.py
@@ -208,6 +208,7 @@ def set_generator(self, generator_circuit: Optional[Union[QuantumCircuit,
"""
self._generator = QuantumGenerator(self._bounds, self._num_qubits,
generator_circuit, generator_init_params,
generator_optimizer,
self._snapshot_dir)

@property
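For context, a minimal usage sketch of the new keyword. The QGAN constructor arguments below (data, bounds, num_qubits, num_epochs) are illustrative placeholders assuming the qiskit-aqua 0.8 signature; generator_optimizer is the argument this PR threads through set_generator:

    import numpy as np
    from qiskit.aqua.algorithms import QGAN
    from qiskit.aqua.components.optimizers import COBYLA

    # Toy 1D training data for the distribution to be learned.
    data = np.random.lognormal(mean=1., sigma=1., size=1000)
    qgan = QGAN(data, bounds=[0., 3.], num_qubits=[2], num_epochs=10)

    # New in this PR: supply a custom optimizer for the generator.
    # Keep maxiter=1 so generator and discriminator updates alternate.
    qgan.set_generator(generator_optimizer=COBYLA(maxiter=1))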
40 changes: 32 additions & 8 deletions qiskit/aqua/components/neural_networks/quantum_generator.py
@@ -13,14 +13,16 @@
"""Quantum Generator."""

from typing import Optional, List, Union, Dict, Any
import warnings
from copy import deepcopy

import numpy as np

from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
from qiskit.circuit.library import TwoLocal
from qiskit.aqua import aqua_globals
from qiskit.aqua.components.optimizers import ADAM
from qiskit.aqua.components.optimizers import Optimizer
from qiskit.aqua.components.uncertainty_models import UnivariateVariationalDistribution, \
MultivariateVariationalDistribution
from qiskit.aqua.components.neural_networks.generative_network import GenerativeNetwork
@@ -47,6 +49,7 @@ def __init__(self,
MultivariateVariationalDistribution,
QuantumCircuit]] = None,
init_params: Optional[Union[List[float], np.ndarray]] = None,
optimizer: Optional[Optimizer] = None,
snapshot_dir: Optional[str] = None) -> None:
"""
Args:
@@ -59,6 +62,7 @@
or a QuantumCircuit implementing the generator.
init_params: 1D numpy array or list, Initialization for
the generator's parameters.
optimizer: optimizer to be used for the training of the generator
Review comment (Member):

Are there any specific aspects or configuration of the optimizer that we could add to the docstring here to inform the user? For instance, I see that the default optimizer has just one iteration.

Also, in train I see this code that I think will fail if another optimizer is supplied.

        self._optimizer._maxiter = 1
        self._optimizer._t = 0

Could we have a test case with a different optimizer to ensure things work?

snapshot_dir: str or None, if not None save the optimizer's parameter after every
update step to the given directory

@@ -96,9 +100,12 @@ def __init__(self,
self._bound_parameters = init_params

# Set optimizer for updating the generator network
self._optimizer = ADAM(maxiter=1, tol=1e-6, lr=1e-3, beta_1=0.7,
beta_2=0.99, noise_factor=1e-6,
eps=1e-6, amsgrad=True, snapshot_dir=snapshot_dir)
if optimizer:
self._optimizer = optimizer
else:
self._optimizer = ADAM(maxiter=1, tol=1e-6, lr=1e-3, beta_1=0.7,
beta_2=0.99, noise_factor=1e-6,
eps=1e-6, amsgrad=True, snapshot_dir=snapshot_dir)

if np.ndim(self._bounds) == 1:
bounds = np.reshape(self._bounds, (1, len(self._bounds)))
@@ -312,11 +319,28 @@ def train(self, quantum_instance=None, shots=None):
Returns:
dict: generator loss(float) and updated parameters (array).
"""

self._shots = shots
# Force single optimization iteration
self._optimizer._maxiter = 1
self._optimizer._t = 0

# TODO Improve access to maxiter, say via an options getter, to avoid private member access.
# Since not all optimizers use that exact attribute name, find a better approach as well, so
# that the check below does not have to warn when the attribute is named differently but
# max iterations is truly 1 anyway.
try:
if self._optimizer._maxiter != 1:
warnings.warn('Please set the optimizer maxiter argument to 1 '
'to ensure that the generator '
'and discriminator are updated in an alternating fashion.')
except AttributeError:
maxiter = self._optimizer._options.get('maxiter')
if maxiter is not None and maxiter != 1:
warnings.warn('Please set the optimizer maxiter argument to 1 '
'to ensure that the generator '
'and discriminator are updated in an alternating fashion.')
elif maxiter is None:
warnings.warn('Please make sure the optimizer max iterations are set to 1 '
'to ensure that the generator '
'and discriminator are updated in an alternating fashion.')

objective = self._get_objective_function(quantum_instance, self._discriminator)
self._bound_parameters, loss, _ = self._optimizer.optimize(
num_vars=len(self._bound_parameters),
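To make the new maxiter check concrete, here is a standalone sketch of the same attribute-then-options fallback. The helper name is hypothetical; the lookups mirror the train() code above:

    import warnings

    def warn_unless_single_iteration(optimizer):
        # ADAM stores maxiter as the private attribute _maxiter, while
        # other aqua optimizers keep 'maxiter' in their _options dict.
        try:
            maxiter = optimizer._maxiter
        except AttributeError:
            maxiter = optimizer._options.get('maxiter')
        # None (unknown) or anything other than 1 triggers the warning.
        if maxiter != 1:
            warnings.warn('Set the optimizer maxiter argument to 1 so that '
                          'the generator and discriminator are updated in an '
                          'alternating fashion.')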
19 changes: 19 additions & 0 deletions test/aqua/test_qgan.py
@@ -24,6 +24,7 @@
from qiskit.aqua.algorithms import QGAN
from qiskit.aqua import aqua_globals, QuantumInstance, MissingOptionalLibraryError
from qiskit.aqua.components.initial_states import Custom
from qiskit.aqua.components.optimizers import CG, COBYLA
from qiskit.aqua.components.neural_networks import NumPyDiscriminator, PyTorchDiscriminator
from qiskit import BasicAer

@@ -110,6 +111,24 @@ def test_sample_generation(self, circuit_type):
for i, weight_q in enumerate(weights_qasm):
self.assertAlmostEqual(weight_q, weights_statevector[i], delta=0.1)

def test_qgan_training_cg(self):
"""Test QGAN training."""
optimizer = CG(maxiter=1)
self.qgan.set_generator(generator_circuit=self.generator_circuit,
generator_optimizer=optimizer)
trained_statevector = self.qgan.run(self.qi_statevector)
trained_qasm = self.qgan.run(self.qi_qasm)
self.assertAlmostEqual(trained_qasm['rel_entr'], trained_statevector['rel_entr'], delta=0.1)

def test_qgan_training_cobyla(self):
"""Test QGAN training."""
optimizer = COBYLA(maxiter=1)
self.qgan.set_generator(generator_circuit=self.generator_circuit,
generator_optimizer=optimizer)
trained_statevector = self.qgan.run(self.qi_statevector)
trained_qasm = self.qgan.run(self.qi_qasm)
self.assertAlmostEqual(trained_qasm['rel_entr'], trained_statevector['rel_entr'], delta=0.1)

def test_qgan_training(self):
"""Test QGAN training."""
warnings.filterwarnings('ignore', category=DeprecationWarning)
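Finally, a sketch of what the new tests exercise end to end, reusing the qgan object from the first sketch. The backends and the rel_entr result key mirror the test code; the shot count is illustrative:

    from qiskit import BasicAer
    from qiskit.aqua import QuantumInstance
    from qiskit.aqua.components.optimizers import CG

    # Train with a CG optimizer on both simulators, as the new tests do.
    qgan.set_generator(generator_optimizer=CG(maxiter=1))
    result_sv = qgan.run(QuantumInstance(BasicAer.get_backend('statevector_simulator')))
    result_qasm = qgan.run(QuantumInstance(BasicAer.get_backend('qasm_simulator'), shots=1000))

    # The tests assert these relative entropies agree to within delta=0.1.
    print(result_sv['rel_entr'], result_qasm['rel_entr'])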