Skip to content

Commit

Permalink
Cleanup and bugfix to support different primitives. (#55) (#855)
Browse files Browse the repository at this point in the history
* Cleanup and bugfix for different primitives support (#55)

* Quick fix and lint for unit tests.

* Fixed a bug in ComputeUncompute and lint corrections.

* Fix formatting for algorithm tests

* Reformatting some variables to make lint compliant.

* Refactor: Cleanup code, preserve existing formatting, apply minor bug fixes, and update missing documentation

* Removing unsupported classes.

* Fix for lint

* Fix lint errors uncovered during workflow checks

* Adjust a unit test to accommodate noise-related variations
  • Loading branch information
OkuyanBoga authored Nov 18, 2024
1 parent c39bd8f commit 2f7e19c
Show file tree
Hide file tree
Showing 26 changed files with 1,506 additions and 1,792 deletions.
1 change: 1 addition & 0 deletions .pylintdict
Original file line number Diff line number Diff line change
Expand Up @@ -390,6 +390,7 @@ platt
polyfit
postprocess
powell
pragma
pre
precompute
precomputed
Expand Down
23 changes: 0 additions & 23 deletions qiskit_machine_learning/gradients/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,9 @@
:nosignatures:
BaseEstimatorGradient
BaseQGT
BaseSamplerGradient
EstimatorGradientResult
SamplerGradientResult
QGTResult
Linear Combination of Unitaries
-------------------------------
Expand All @@ -40,7 +38,6 @@
LinCombEstimatorGradient
LinCombSamplerGradient
LinCombQGT
Parameter Shift Rules
---------------------
Expand All @@ -52,16 +49,6 @@
ParamShiftEstimatorGradient
ParamShiftSamplerGradient
Quantum Fisher Information
--------------------------
.. autosummary::
:toctree: ../stubs/
:nosignatures:
QFIResult
QFI
Simultaneous Perturbation Stochastic Approximation
--------------------------------------------------
Expand All @@ -74,35 +61,25 @@
"""

from .base.base_estimator_gradient import BaseEstimatorGradient
from .base.base_qgt import BaseQGT
from .base.base_sampler_gradient import BaseSamplerGradient
from .base.estimator_gradient_result import EstimatorGradientResult
from .lin_comb.lin_comb_estimator_gradient import DerivativeType, LinCombEstimatorGradient
from .lin_comb.lin_comb_qgt import LinCombQGT
from .lin_comb.lin_comb_sampler_gradient import LinCombSamplerGradient
from .param_shift.param_shift_estimator_gradient import ParamShiftEstimatorGradient
from .param_shift.param_shift_sampler_gradient import ParamShiftSamplerGradient
from .qfi import QFI
from .qfi_result import QFIResult
from .base.qgt_result import QGTResult
from .base.sampler_gradient_result import SamplerGradientResult
from .spsa.spsa_estimator_gradient import SPSAEstimatorGradient
from .spsa.spsa_sampler_gradient import SPSASamplerGradient

__all__ = [
"BaseEstimatorGradient",
"BaseQGT",
"BaseSamplerGradient",
"DerivativeType",
"EstimatorGradientResult",
"LinCombEstimatorGradient",
"LinCombQGT",
"LinCombSamplerGradient",
"ParamShiftEstimatorGradient",
"ParamShiftSamplerGradient",
"QFI",
"QFIResult",
"QGTResult",
"SamplerGradientResult",
"SPSAEstimatorGradient",
"SPSASamplerGradient",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,6 @@ def __init__(
r"""
Args:
estimator: The estimator used to compute the gradients.
pass_manager: pass manager for isa_circuit transpilation.
options: Primitive backend runtime options used for circuit execution.
The order of priority is: options in ``run`` method > gradient's
default options > primitive's default setting.
Expand All @@ -71,6 +70,8 @@ def __init__(
Defaults to ``DerivativeType.REAL``, as this yields e.g. the commonly-used energy
gradient and this type is the only supported type for function-level schemes like
finite difference.
pass_manager: The pass manager to transpile the circuits if necessary.
Defaults to ``None``, as some primitives do not need transpiled circuits.
"""
if isinstance(estimator, BaseEstimatorV1):
issue_deprecation_msg(
Expand Down
Loading

0 comments on commit 2f7e19c

Please sign in to comment.