From c9d2f25a7c822413eb56385911ff928180abeb09 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 08:44:48 -0700 Subject: [PATCH 01/26] move get_backend/set_backend out of init, and refactor things slightly to support default_backend changing --- src/pyhf/__init__.py | 161 +++------------------------ src/pyhf/constraints.py | 3 +- src/pyhf/interpolators/code0.py | 3 +- src/pyhf/interpolators/code1.py | 3 +- src/pyhf/interpolators/code2.py | 3 +- src/pyhf/interpolators/code4.py | 3 +- src/pyhf/interpolators/code4p.py | 3 +- src/pyhf/modifiers/shapefactor.py | 4 +- src/pyhf/modifiers/shapesys.py | 4 +- src/pyhf/modifiers/staterror.py | 4 +- src/pyhf/optimize/common.py | 2 +- src/pyhf/optimize/mixins.py | 3 +- src/pyhf/parameters/paramsets.py | 6 +- src/pyhf/parameters/paramview.py | 10 +- src/pyhf/pdf.py | 9 +- src/pyhf/tensor/common.py | 11 +- src/pyhf/tensor/manager.py | 173 ++++++++++++++++++++++++++++++ 17 files changed, 229 insertions(+), 176 deletions(-) create mode 100644 src/pyhf/tensor/manager.py diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index a7d317fea3..72b4e27487 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -1,155 +1,8 @@ from pyhf.tensor import BackendRetriever as tensor -from pyhf.optimize import OptimizerRetriever as optimize +from pyhf.tensor.manager import get_backend +from pyhf.tensor.manager import set_backend from pyhf._version import version as __version__ from pyhf.exceptions import InvalidBackend, InvalidOptimizer, Unsupported -from pyhf import events - -tensorlib = None -optimizer = None - - -def get_backend(): - """ - Get the current backend and the associated optimizer - - Example: - >>> import pyhf - >>> backend, optimizer = pyhf.get_backend() - >>> backend - - >>> optimizer - - - Returns: - backend, optimizer - """ - global tensorlib - global optimizer - return tensorlib, optimizer - - -tensorlib = tensor.numpy_backend() -default_backend = tensorlib -optimizer = optimize.scipy_optimizer() -default_optimizer = optimizer - - -@events.register('change_backend') -def set_backend(backend, custom_optimizer=None, precision=None): - """ - Set the backend and the associated optimizer - - Example: - >>> import pyhf - >>> pyhf.set_backend("tensorflow") - >>> pyhf.tensorlib.name - 'tensorflow' - >>> pyhf.tensorlib.precision - '64b' - >>> pyhf.set_backend(b"pytorch", precision="32b") - >>> pyhf.tensorlib.name - 'pytorch' - >>> pyhf.tensorlib.precision - '32b' - >>> pyhf.set_backend(pyhf.tensor.numpy_backend()) - >>> pyhf.tensorlib.name - 'numpy' - >>> pyhf.tensorlib.precision - '64b' - - Args: - backend (:obj:`str` or `pyhf.tensor` backend): One of the supported pyhf backends: NumPy, TensorFlow, PyTorch, and JAX - custom_optimizer (`pyhf.optimize` optimizer): Optional custom optimizer defined by the user - precision (:obj:`str`): Floating point precision to use in the backend: ``64b`` or ``32b``. Default is backend dependent. 
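# A minimal usage sketch of the behaviour this commit introduces, based on the
# new src/pyhf/tensor/manager.py below and on the tests added later in this
# series; it assumes the jax backend is installed.
import pyhf

pyhf.set_backend("jax")                      # change only the current backend
assert pyhf.tensorlib.name == "jax"          # resolved via module __getattr__
assert pyhf.default_backend.name == "numpy"  # the default backend is untouched

pyhf.set_backend("jax", default=True)        # additionally promote to default
assert pyhf.default_backend.name == "jax"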
- - Returns: - None - """ - global tensorlib - global optimizer - - _supported_precisions = ["32b", "64b"] - backend_kwargs = {} - - if isinstance(precision, (str, bytes)): - if isinstance(precision, bytes): - precision = precision.decode("utf-8") - precision = precision.lower() - - if isinstance(backend, (str, bytes)): - if isinstance(backend, bytes): - backend = backend.decode("utf-8") - backend = backend.lower() - - if precision is not None: - backend_kwargs["precision"] = precision - - try: - backend = getattr(tensor, f"{backend:s}_backend")(**backend_kwargs) - except TypeError: - raise InvalidBackend( - f"The backend provided is not supported: {backend:s}. Select from one of the supported backends: numpy, tensorflow, pytorch" - ) - - _name_supported = getattr(tensor, f"{backend.name:s}_backend") - if _name_supported: - if not isinstance(backend, _name_supported): - raise AttributeError( - f"'{backend.name:s}' is not a valid name attribute for backend type {type(backend)}\n Custom backends must have names unique from supported backends" - ) - if backend.precision not in _supported_precisions: - raise Unsupported( - f"The backend precision provided is not supported: {backend.precision:s}. Select from one of the supported precisions: {', '.join([str(v) for v in _supported_precisions])}" - ) - # If "precision" arg passed, it should always win - # If no "precision" arg, defer to tensor backend object API if set there - if precision is not None: - if backend.precision != precision: - backend_kwargs["precision"] = precision - backend = getattr(tensor, f"{backend.name:s}_backend")(**backend_kwargs) - - # need to determine if the tensorlib changed or the optimizer changed for events - tensorlib_changed = bool( - (backend.name != tensorlib.name) | (backend.precision != tensorlib.precision) - ) - optimizer_changed = False - - if custom_optimizer: - if isinstance(custom_optimizer, (str, bytes)): - if isinstance(custom_optimizer, bytes): - custom_optimizer = custom_optimizer.decode("utf-8") - try: - new_optimizer = getattr( - optimize, f"{custom_optimizer.lower()}_optimizer" - )() - except TypeError: - raise InvalidOptimizer( - f"The optimizer provided is not supported: {custom_optimizer}. 
Select from one of the supported optimizers: scipy, minuit" - ) - else: - _name_supported = getattr(optimize, f"{custom_optimizer.name:s}_optimizer") - if _name_supported: - if not isinstance(custom_optimizer, _name_supported): - raise AttributeError( - f"'{custom_optimizer.name}' is not a valid name attribute for optimizer type {type(custom_optimizer)}\n Custom optimizers must have names unique from supported optimizers" - ) - new_optimizer = custom_optimizer - - else: - new_optimizer = optimize.scipy_optimizer() - - optimizer_changed = bool(optimizer != new_optimizer) - # set new backend - tensorlib = backend - optimizer = new_optimizer - # trigger events - if tensorlib_changed: - events.trigger("tensorlib_changed")() - if optimizer_changed: - events.trigger("optimizer_changed")() - # set up any other globals for backend - tensorlib._setup() - from pyhf.pdf import Model from pyhf.workspace import Workspace @@ -164,6 +17,7 @@ def set_backend(backend, custom_optimizer=None, precision=None): "Workspace", "__version__", "compat", + "default_backend", "exceptions", "get_backend", "infer", @@ -185,3 +39,12 @@ def set_backend(backend, custom_optimizer=None, precision=None): def __dir__(): return __all__ + + +def __getattr__(name): + if name == 'tensorlib': + return get_backend(default=False)[0] + elif name == 'optimizer': + return get_backend(default=False)[1] + elif name == 'default_backend': + return get_backend(default=True)[0] diff --git a/src/pyhf/constraints.py b/src/pyhf/constraints.py index 03ec7de448..39688a21a7 100644 --- a/src/pyhf/constraints.py +++ b/src/pyhf/constraints.py @@ -1,4 +1,5 @@ -from pyhf import get_backend, default_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import events from pyhf import probability as prob from pyhf.parameters import ParamViewer diff --git a/src/pyhf/interpolators/code0.py b/src/pyhf/interpolators/code0.py index a7fef32aec..2874711da2 100644 --- a/src/pyhf/interpolators/code0.py +++ b/src/pyhf/interpolators/code0.py @@ -1,6 +1,7 @@ """Piecewise-linear Interpolation. 
(Code 0).""" import logging -from pyhf import get_backend, default_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import events from pyhf.interpolators import _slow_interpolator_looper diff --git a/src/pyhf/interpolators/code1.py b/src/pyhf/interpolators/code1.py index 0f0b23e8d7..2dfb19f554 100644 --- a/src/pyhf/interpolators/code1.py +++ b/src/pyhf/interpolators/code1.py @@ -1,7 +1,8 @@ """Piecewise-Exponential Interpolation (Code 1).""" import logging import math -from pyhf import get_backend, default_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import events from pyhf.interpolators import _slow_interpolator_looper diff --git a/src/pyhf/interpolators/code2.py b/src/pyhf/interpolators/code2.py index 0048c403fb..92c45f9d76 100644 --- a/src/pyhf/interpolators/code2.py +++ b/src/pyhf/interpolators/code2.py @@ -1,6 +1,7 @@ """Quadratic Interpolation (Code 2).""" import logging -from pyhf import get_backend, default_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import events from pyhf.interpolators import _slow_interpolator_looper diff --git a/src/pyhf/interpolators/code4.py b/src/pyhf/interpolators/code4.py index df5bc3014a..d0b2b00c78 100644 --- a/src/pyhf/interpolators/code4.py +++ b/src/pyhf/interpolators/code4.py @@ -1,7 +1,8 @@ """Polynomial Interpolation (Code 4).""" import logging import math -from pyhf import get_backend, default_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import events from pyhf.interpolators import _slow_interpolator_looper diff --git a/src/pyhf/interpolators/code4p.py b/src/pyhf/interpolators/code4p.py index 4f6869ccdd..b5671cb447 100644 --- a/src/pyhf/interpolators/code4p.py +++ b/src/pyhf/interpolators/code4p.py @@ -1,6 +1,7 @@ """Piecewise-Linear + Polynomial Interpolation (Code 4p).""" import logging -from pyhf import get_backend, default_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import events from pyhf.interpolators import _slow_interpolator_looper diff --git a/src/pyhf/modifiers/shapefactor.py b/src/pyhf/modifiers/shapefactor.py index d888d45102..00b34121f9 100644 --- a/src/pyhf/modifiers/shapefactor.py +++ b/src/pyhf/modifiers/shapefactor.py @@ -1,6 +1,8 @@ import logging -from pyhf import get_backend, default_backend, events +import pyhf +from pyhf import events +from pyhf.tensor.manager import get_backend from pyhf.parameters import ParamViewer log = logging.getLogger(__name__) diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py index d8dfcfce3a..5fa3f608f9 100644 --- a/src/pyhf/modifiers/shapesys.py +++ b/src/pyhf/modifiers/shapesys.py @@ -1,6 +1,8 @@ import logging -from pyhf import get_backend, default_backend, events +import pyhf +from pyhf import events +from pyhf.tensor.manager import get_backend from pyhf.parameters import ParamViewer log = logging.getLogger(__name__) diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index ec741e2309..51518acc9e 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -1,6 +1,8 @@ import logging -from pyhf import get_backend, default_backend, events +import pyhf +from pyhf import events +from pyhf.tensor.manager import get_backend from pyhf.parameters import ParamViewer log = logging.getLogger(__name__) diff --git a/src/pyhf/optimize/common.py b/src/pyhf/optimize/common.py index 65dd04582f..61eeafd889 100644 --- a/src/pyhf/optimize/common.py +++ b/src/pyhf/optimize/common.py @@ 
-1,5 +1,5 @@ """Common Backend Shim to prepare minimization for optimizer.""" -from pyhf import get_backend +from pyhf.tensor.manager import get_backend from pyhf.tensor.common import _TensorViewer diff --git a/src/pyhf/optimize/mixins.py b/src/pyhf/optimize/mixins.py index 4189f64958..b7ab523925 100644 --- a/src/pyhf/optimize/mixins.py +++ b/src/pyhf/optimize/mixins.py @@ -1,5 +1,6 @@ """Helper Classes for use of automatic differentiation.""" -from pyhf import get_backend, exceptions +from pyhf.tensor.manager import get_backend +from pyhf import exceptions from pyhf.optimize.common import shim import logging diff --git a/src/pyhf/parameters/paramsets.py b/src/pyhf/parameters/paramsets.py index c56094d601..532ae8ff09 100644 --- a/src/pyhf/parameters/paramsets.py +++ b/src/pyhf/parameters/paramsets.py @@ -1,4 +1,4 @@ -from pyhf import default_backend +import pyhf __all__ = [ "constrained_by_normal", @@ -66,8 +66,8 @@ def __init__(self, **kwargs): def width(self): try: - return default_backend.sqrt( - 1.0 / default_backend.astensor(self.factors) + return pyhf.default_backend.sqrt( + 1.0 / pyhf.default_backend.astensor(self.factors) ).tolist() except AttributeError: raise RuntimeError('need to know rate factor to compu') diff --git a/src/pyhf/parameters/paramview.py b/src/pyhf/parameters/paramview.py index b5d079b65f..f131d656a2 100644 --- a/src/pyhf/parameters/paramview.py +++ b/src/pyhf/parameters/paramview.py @@ -1,4 +1,6 @@ -from pyhf import get_backend, default_backend, events +import pyhf +from pyhf import events +from pyhf.tensor.manager import get_backend from pyhf.tensor.common import ( _tensorviewer_from_slices, _tensorviewer_from_sizes, @@ -52,9 +54,9 @@ def __init__(self, shape, par_map, par_selection): batch_size = shape[0] if len(shape) > 1 else None - fullsize = default_backend.product(default_backend.astensor(shape)) - flat_indices = default_backend.astensor(range(int(fullsize)), dtype='int') - self._all_indices = default_backend.reshape(flat_indices, shape) + fullsize = pyhf.default_backend.product(pyhf.default_backend.astensor(shape)) + flat_indices = pyhf.default_backend.astensor(range(int(fullsize)), dtype='int') + self._all_indices = pyhf.default_backend.reshape(flat_indices, shape) # a tensor viewer that can split and stitch parameters self.allpar_viewer = _tensorviewer_from_parmap(par_map, batch_size) diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index 502461559e..081e009b41 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -4,7 +4,8 @@ import logging import pyhf.parameters -from pyhf import get_backend, default_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import exceptions from pyhf import utils from pyhf import events @@ -64,10 +65,10 @@ def append(self, channel, sample, defined_samp): self.mega_samples[sample]['nom'] += nom def finalize(self): - nominal_rates = default_backend.astensor( + nominal_rates = pyhf.default_backend.astensor( [self.mega_samples[sample]['nom'] for sample in self.config.samples] ) - _nominal_rates = default_backend.reshape( + _nominal_rates = pyhf.default_backend.reshape( nominal_rates, ( 1, # modifier dimension.. 
nominal_rates is the base @@ -476,7 +477,7 @@ def __init__(self, config, modifiers, nominal_rates, batch_size=None): self._delta_mods = [] self.batch_size = batch_size - self._nominal_rates = default_backend.tile( + self._nominal_rates = pyhf.default_backend.tile( nominal_rates, (1, 1, self.batch_size or 1, 1) ) diff --git a/src/pyhf/tensor/common.py b/src/pyhf/tensor/common.py index 32f3583f40..75257b492d 100644 --- a/src/pyhf/tensor/common.py +++ b/src/pyhf/tensor/common.py @@ -1,4 +1,5 @@ -from pyhf import default_backend, get_backend +import pyhf +from pyhf.tensor.manager import get_backend from pyhf import events @@ -17,10 +18,10 @@ def __init__(self, indices, batch_size=None, names=None): self.batch_size = batch_size self.names = names self._partition_indices = indices - _concat_indices = default_backend.astensor( - default_backend.concatenate(self._partition_indices), dtype='int' + _concat_indices = pyhf.default_backend.astensor( + pyhf.default_backend.concatenate(self._partition_indices), dtype='int' ) - self._sorted_indices = default_backend.tolist(_concat_indices.argsort()) + self._sorted_indices = pyhf.default_backend.tolist(_concat_indices.argsort()) self._precompute() events.subscribe('tensorlib_changed')(self._precompute) @@ -64,7 +65,7 @@ def split(self, data, selection=None): def _tensorviewer_from_slices(target_slices, names, batch_size): - db = default_backend + db = pyhf.default_backend ranges = [] for sl in target_slices: ranges.append(db.astensor(range(sl.start, sl.stop))) diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py new file mode 100644 index 0000000000..f1b0442a19 --- /dev/null +++ b/src/pyhf/tensor/manager.py @@ -0,0 +1,173 @@ +from pyhf.tensor import BackendRetriever + +STATE = { + 'default': (None, None), + 'current': (None, None), +} + + +def get_backend(default=False): + """ + Get the current backend and the associated optimizer + + Example: + >>> import pyhf + >>> backend, optimizer = pyhf.get_backend() + >>> backend + + >>> optimizer + + + Args: + default (:obj:`bool`): Return the default backend or not + + Returns: + backend, optimizer + """ + global STATE + return STATE['default' if default else 'current'] + + +from pyhf.optimize import OptimizerRetriever +from pyhf import events + +STATE['default'] = ( + BackendRetriever.numpy_backend(), + OptimizerRetriever.scipy_optimizer(), +) +STATE['current'] = STATE['default'] + + +@events.register('change_backend') +def set_backend(backend, custom_optimizer=None, precision=None, default=False): + """ + Set the backend and the associated optimizer + + Example: + >>> import pyhf + >>> pyhf.set_backend("tensorflow") + >>> pyhf.tensorlib.name + 'tensorflow' + >>> pyhf.tensorlib.precision + '64b' + >>> pyhf.set_backend(b"pytorch", precision="32b") + >>> pyhf.tensorlib.name + 'pytorch' + >>> pyhf.tensorlib.precision + '32b' + >>> pyhf.set_backend(pyhf.tensor.numpy_backend()) + >>> pyhf.tensorlib.name + 'numpy' + >>> pyhf.tensorlib.precision + '64b' + + Args: + backend (:obj:`str` or `pyhf.tensor` backend): One of the supported pyhf backends: NumPy, TensorFlow, PyTorch, and JAX + custom_optimizer (`pyhf.optimize` optimizer): Optional custom optimizer defined by the user + precision (:obj:`str`): Floating point precision to use in the backend: ``64b`` or ``32b``. Default is backend dependent. 
+ default (:obj:`bool`): Set the backend as the default backend additionally + + Returns: + None + """ + global STATE + + _supported_precisions = ["32b", "64b"] + backend_kwargs = {} + + if isinstance(precision, (str, bytes)): + if isinstance(precision, bytes): + precision = precision.decode("utf-8") + precision = precision.lower() + + if isinstance(backend, (str, bytes)): + if isinstance(backend, bytes): + backend = backend.decode("utf-8") + backend = backend.lower() + + if precision is not None: + backend_kwargs["precision"] = precision + + try: + backend = getattr(BackendRetriever, f"{backend:s}_backend")( + **backend_kwargs + ) + except TypeError: + raise InvalidBackend( + f"The backend provided is not supported: {backend:s}. Select from one of the supported backends: numpy, tensorflow, pytorch" + ) + + _name_supported = getattr(BackendRetriever, f"{backend.name:s}_backend") + if _name_supported: + if not isinstance(backend, _name_supported): + raise AttributeError( + f"'{backend.name:s}' is not a valid name attribute for backend type {type(backend)}\n Custom backends must have names unique from supported backends" + ) + if backend.precision not in _supported_precisions: + raise Unsupported( + f"The backend precision provided is not supported: {backend.precision:s}. Select from one of the supported precisions: {', '.join([str(v) for v in _supported_precisions])}" + ) + # If "precision" arg passed, it should always win + # If no "precision" arg, defer to tensor backend object API if set there + if precision is not None: + if backend.precision != precision: + backend_kwargs["precision"] = precision + backend = getattr(BackendRetriever, f"{backend.name:s}_backend")( + **backend_kwargs + ) + + if custom_optimizer: + if isinstance(custom_optimizer, (str, bytes)): + if isinstance(custom_optimizer, bytes): + custom_optimizer = custom_optimizer.decode("utf-8") + try: + new_optimizer = getattr( + OptimizerRetriever, f"{custom_optimizer.lower()}_optimizer" + )() + except TypeError: + raise InvalidOptimizer( + f"The optimizer provided is not supported: {custom_optimizer}. 
Select from one of the supported optimizers: scipy, minuit" + ) + else: + _name_supported = getattr( + OptimizerRetriever, f"{custom_optimizer.name:s}_optimizer" + ) + if _name_supported: + if not isinstance(custom_optimizer, _name_supported): + raise AttributeError( + f"'{custom_optimizer.name}' is not a valid name attribute for optimizer type {type(custom_optimizer)}\n Custom optimizers must have names unique from supported optimizers" + ) + new_optimizer = custom_optimizer + + else: + new_optimizer = OptimizerRetriever.scipy_optimizer() + + # need to determine if the tensorlib changed or the optimizer changed for events + tensorlib_changed = bool( + (backend.name != STATE['current'][0].name) + | (backend.precision != STATE['current'][0].precision) + ) + optimizer_changed = bool(STATE['current'][1] != new_optimizer) + # set new backend + STATE['current'] = (backend, new_optimizer) + if default: + default_tensorlib_changed = bool( + (backend.name != STATE['default'][0].name) + | (backend.precision != STATE['default'][0].precision) + ) + default_optimizer_changed = bool(STATE['default'][1] != new_optimizer) + # trigger events + if default_tensorlib_changed: + events.trigger("default_tensorlib_changed")() + if default_optimizer_changed: + events.trigger("default_optimizer_changed")() + + STATE['default'] = STATE['current'] + + # trigger events + if tensorlib_changed: + events.trigger("tensorlib_changed")() + if optimizer_changed: + events.trigger("optimizer_changed")() + # set up any other globals for backend + backend._setup() From 14e332ddf976dce0792b1181445ba875d4c62c36 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 08:50:08 -0700 Subject: [PATCH 02/26] fix some codefactors --- src/pyhf/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index 72b4e27487..393cf4dc1b 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -44,7 +44,7 @@ def __dir__(): def __getattr__(name): if name == 'tensorlib': return get_backend(default=False)[0] - elif name == 'optimizer': + if name == 'optimizer': return get_backend(default=False)[1] - elif name == 'default_backend': + if name == 'default_backend': return get_backend(default=True)[0] From 0df86423e1a3c5f67514992a5273e78edb323996 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 08:55:28 -0700 Subject: [PATCH 03/26] use non-globals --- src/pyhf/tensor/manager.py | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py index f1b0442a19..5ec0df61a4 100644 --- a/src/pyhf/tensor/manager.py +++ b/src/pyhf/tensor/manager.py @@ -1,6 +1,9 @@ +import sys + from pyhf.tensor import BackendRetriever -STATE = { +this = sys.modules[__name__] +this.state = { 'default': (None, None), 'current': (None, None), } @@ -24,18 +27,17 @@ def get_backend(default=False): Returns: backend, optimizer """ - global STATE - return STATE['default' if default else 'current'] + return this.state['default' if default else 'current'] from pyhf.optimize import OptimizerRetriever from pyhf import events -STATE['default'] = ( +this.state['default'] = ( BackendRetriever.numpy_backend(), OptimizerRetriever.scipy_optimizer(), ) -STATE['current'] = STATE['default'] +this.state['current'] = this.state['default'] @events.register('change_backend') @@ -70,8 +72,6 @@ def set_backend(backend, custom_optimizer=None, precision=None, default=False): Returns: None """ - global STATE - 
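# A self-contained sketch (hypothetical module, not pyhf code) of the
# ``this = sys.modules[__name__]`` idiom this commit adopts: module state is
# rebound through the module object itself, so functions no longer need
# ``global`` declarations like the one removed above.
import sys

this = sys.modules[__name__]  # handle to the current module object

this.current = None


def set_current(value):
    # rebinds the module attribute; with a bare name this would require
    # a ``global current`` declaration
    this.current = value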
_supported_precisions = ["32b", "64b"] backend_kwargs = {} @@ -144,25 +144,25 @@ def set_backend(backend, custom_optimizer=None, precision=None, default=False): # need to determine if the tensorlib changed or the optimizer changed for events tensorlib_changed = bool( - (backend.name != STATE['current'][0].name) - | (backend.precision != STATE['current'][0].precision) + (backend.name != this.state['current'][0].name) + | (backend.precision != this.state['current'][0].precision) ) - optimizer_changed = bool(STATE['current'][1] != new_optimizer) + optimizer_changed = bool(this.state['current'][1] != new_optimizer) # set new backend - STATE['current'] = (backend, new_optimizer) + this.state['current'] = (backend, new_optimizer) if default: default_tensorlib_changed = bool( - (backend.name != STATE['default'][0].name) - | (backend.precision != STATE['default'][0].precision) + (backend.name != this.state['default'][0].name) + | (backend.precision != this.state['default'][0].precision) ) - default_optimizer_changed = bool(STATE['default'][1] != new_optimizer) + default_optimizer_changed = bool(this.state['default'][1] != new_optimizer) # trigger events if default_tensorlib_changed: events.trigger("default_tensorlib_changed")() if default_optimizer_changed: events.trigger("default_optimizer_changed")() - STATE['default'] = STATE['current'] + this.state['default'] = this.state['current'] # trigger events if tensorlib_changed: From fe576d9ce61ff30879260d000b0b598eacaaec71 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 09:05:43 -0700 Subject: [PATCH 04/26] flake8/lint --- src/pyhf/__init__.py | 1 - src/pyhf/constraints.py | 26 +++++++++++++------------- src/pyhf/interpolators/code0.py | 6 +++--- src/pyhf/interpolators/code1.py | 10 +++++----- src/pyhf/interpolators/code2.py | 6 ++++-- src/pyhf/interpolators/code4.py | 28 ++++++++++++++-------------- src/pyhf/interpolators/code4p.py | 6 +++--- src/pyhf/modifiers/shapefactor.py | 2 +- src/pyhf/modifiers/shapesys.py | 23 +++++++++++++---------- src/pyhf/modifiers/staterror.py | 28 +++++++++++++++------------- src/pyhf/tensor/manager.py | 7 ++++--- 11 files changed, 75 insertions(+), 68 deletions(-) diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index 393cf4dc1b..79b88fe763 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -2,7 +2,6 @@ from pyhf.tensor.manager import get_backend from pyhf.tensor.manager import set_backend from pyhf._version import version as __version__ -from pyhf.exceptions import InvalidBackend, InvalidOptimizer, Unsupported from pyhf.pdf import Model from pyhf.workspace import Workspace diff --git a/src/pyhf/constraints.py b/src/pyhf/constraints.py index 39688a21a7..a4136f227d 100644 --- a/src/pyhf/constraints.py +++ b/src/pyhf/constraints.py @@ -61,18 +61,18 @@ def __init__(self, pdfconfig, batch_size=None): # if this constraint terms is at all used (non-zrto idx selection # start preparing constant tensors if self.param_viewer.index_selection: - self._normal_data = default_backend.astensor( - default_backend.concatenate(normal_constraint_data), dtype='int' + self._normal_data = pyhf.default_backend.astensor( + pyhf.default_backend.concatenate(normal_constraint_data), dtype='int' ) - _normal_sigmas = default_backend.concatenate(normal_constraint_sigmas) + _normal_sigmas = pyhf.default_backend.concatenate(normal_constraint_sigmas) if self.batch_size: - sigmas = default_backend.reshape(_normal_sigmas, (1, -1)) - self._sigmas = default_backend.tile(sigmas, (self.batch_size, 1)) + sigmas = 
pyhf.default_backend.reshape(_normal_sigmas, (1, -1)) + self._sigmas = pyhf.default_backend.tile(sigmas, (self.batch_size, 1)) else: self._sigmas = _normal_sigmas - access_field = default_backend.concatenate( + access_field = pyhf.default_backend.concatenate( self.param_viewer.index_selection, axis=1 ) self._access_field = access_field @@ -190,20 +190,20 @@ def __init__(self, pdfconfig, batch_size=None): self._access_field = None self._batched_factors = None if self.param_viewer.index_selection: - self._poisson_data = default_backend.astensor( - default_backend.concatenate(poisson_constraint_data), dtype='int' + self._poisson_data = pyhf.default_backend.astensor( + pyhf.default_backend.concatenate(poisson_constraint_data), dtype='int' ) - _poisson_rate_fac = default_backend.astensor( - default_backend.concatenate(poisson_constraint_rate_factors), + _poisson_rate_fac = pyhf.default_backend.astensor( + pyhf.default_backend.concatenate(poisson_constraint_rate_factors), dtype='float', ) - factors = default_backend.reshape(_poisson_rate_fac, (1, -1)) - self._batched_factors = default_backend.tile( + factors = pyhf.default_backend.reshape(_poisson_rate_fac, (1, -1)) + self._batched_factors = pyhf.default_backend.tile( factors, (self.batch_size or 1, 1) ) - access_field = default_backend.concatenate( + access_field = pyhf.default_backend.concatenate( self.param_viewer.index_selection, axis=1 ) self._access_field = access_field diff --git a/src/pyhf/interpolators/code0.py b/src/pyhf/interpolators/code0.py index 2874711da2..ad3333eb31 100644 --- a/src/pyhf/interpolators/code0.py +++ b/src/pyhf/interpolators/code0.py @@ -27,14 +27,14 @@ class code0: def __init__(self, histogramssets, subscribe=True): """Piecewise-linear Interpolation.""" # nb: this should never be a tensor, store in default backend (e.g. numpy) - self._histogramssets = default_backend.astensor(histogramssets) + self._histogramssets = pyhf.default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets self._deltas_up = self._histogramssets[:, :, 2] - self._histogramssets[:, :, 1] self._deltas_dn = self._histogramssets[:, :, 1] - self._histogramssets[:, :, 0] - self._broadcast_helper = default_backend.ones( - default_backend.shape(self._deltas_up) + self._broadcast_helper = pyhf.default_backend.ones( + pyhf.default_backend.shape(self._deltas_up) ) self._precompute() if subscribe: diff --git a/src/pyhf/interpolators/code1.py b/src/pyhf/interpolators/code1.py index 2dfb19f554..d0245ccc09 100644 --- a/src/pyhf/interpolators/code1.py +++ b/src/pyhf/interpolators/code1.py @@ -28,18 +28,18 @@ class code1: def __init__(self, histogramssets, subscribe=True): """Piecewise-Exponential Interpolation.""" # nb: this should never be a tensor, store in default backend (e.g. 
numpy) - self._histogramssets = default_backend.astensor(histogramssets) + self._histogramssets = pyhf.default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets - self._deltas_up = default_backend.divide( + self._deltas_up = pyhf.default_backend.divide( self._histogramssets[:, :, 2], self._histogramssets[:, :, 1] ) - self._deltas_dn = default_backend.divide( + self._deltas_dn = pyhf.default_backend.divide( self._histogramssets[:, :, 0], self._histogramssets[:, :, 1] ) - self._broadcast_helper = default_backend.ones( - default_backend.shape(self._deltas_up) + self._broadcast_helper = pyhf.default_backend.ones( + pyhf.default_backend.shape(self._deltas_up) ) self._precompute() diff --git a/src/pyhf/interpolators/code2.py b/src/pyhf/interpolators/code2.py index 92c45f9d76..46a08c5757 100644 --- a/src/pyhf/interpolators/code2.py +++ b/src/pyhf/interpolators/code2.py @@ -31,7 +31,7 @@ class code2: def __init__(self, histogramssets, subscribe=True): """Quadratic Interpolation.""" # nb: this should never be a tensor, store in default backend (e.g. numpy) - self._histogramssets = default_backend.astensor(histogramssets) + self._histogramssets = pyhf.default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets @@ -42,7 +42,9 @@ def __init__(self, histogramssets, subscribe=True): self._b = 0.5 * (self._histogramssets[:, :, 2] - self._histogramssets[:, :, 0]) self._b_plus_2a = self._b + 2 * self._a self._b_minus_2a = self._b - 2 * self._a - self._broadcast_helper = default_backend.ones(default_backend.shape(self._a)) + self._broadcast_helper = pyhf.default_backend.ones( + pyhf.default_backend.shape(self._a) + ) self._precompute() if subscribe: events.subscribe('tensorlib_changed')(self._precompute) diff --git a/src/pyhf/interpolators/code4.py b/src/pyhf/interpolators/code4.py index d0b2b00c78..32f95062e4 100644 --- a/src/pyhf/interpolators/code4.py +++ b/src/pyhf/interpolators/code4.py @@ -38,25 +38,25 @@ def __init__(self, histogramssets, subscribe=True, alpha0=1): assert alpha0 > 0 self.__alpha0 = alpha0 # nb: this should never be a tensor, store in default backend (e.g. 
numpy) - self._histogramssets = default_backend.astensor(histogramssets) + self._histogramssets = pyhf.default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets - self._deltas_up = default_backend.divide( + self._deltas_up = pyhf.default_backend.divide( self._histogramssets[:, :, 2], self._histogramssets[:, :, 1] ) - self._deltas_dn = default_backend.divide( + self._deltas_dn = pyhf.default_backend.divide( self._histogramssets[:, :, 0], self._histogramssets[:, :, 1] ) - self._broadcast_helper = default_backend.ones( - default_backend.shape(self._deltas_up) + self._broadcast_helper = pyhf.default_backend.ones( + pyhf.default_backend.shape(self._deltas_up) ) self._alpha0 = self._broadcast_helper * self.__alpha0 - deltas_up_alpha0 = default_backend.power(self._deltas_up, self._alpha0) - deltas_dn_alpha0 = default_backend.power(self._deltas_dn, self._alpha0) + deltas_up_alpha0 = pyhf.default_backend.power(self._deltas_up, self._alpha0) + deltas_dn_alpha0 = pyhf.default_backend.power(self._deltas_dn, self._alpha0) # x = A^{-1} b - A_inverse = default_backend.astensor( + A_inverse = pyhf.default_backend.astensor( [ [ 15.0 / (16 * alpha0), @@ -108,19 +108,19 @@ def __init__(self, histogramssets, subscribe=True, alpha0=1): ], ] ) - b = default_backend.stack( + b = pyhf.default_backend.stack( [ deltas_up_alpha0 - self._broadcast_helper, deltas_dn_alpha0 - self._broadcast_helper, - default_backend.log(self._deltas_up) * deltas_up_alpha0, - -default_backend.log(self._deltas_dn) * deltas_dn_alpha0, - default_backend.power(default_backend.log(self._deltas_up), 2) + pyhf.default_backend.log(self._deltas_up) * deltas_up_alpha0, + -pyhf.default_backend.log(self._deltas_dn) * deltas_dn_alpha0, + pyhf.default_backend.power(pyhf.default_backend.log(self._deltas_up), 2) * deltas_up_alpha0, - default_backend.power(default_backend.log(self._deltas_dn), 2) + pyhf.default_backend.power(pyhf.default_backend.log(self._deltas_dn), 2) * deltas_dn_alpha0, ] ) - self._coefficients = default_backend.einsum( + self._coefficients = pyhf.default_backend.einsum( 'rc,shb,cshb->rshb', A_inverse, self._broadcast_helper, b ) diff --git a/src/pyhf/interpolators/code4p.py b/src/pyhf/interpolators/code4p.py index b5671cb447..a25ffb14fb 100644 --- a/src/pyhf/interpolators/code4p.py +++ b/src/pyhf/interpolators/code4p.py @@ -20,14 +20,14 @@ class code4p: def __init__(self, histogramssets, subscribe=True): """Piecewise-Linear + Polynomial Interpolation.""" # nb: this should never be a tensor, store in default backend (e.g. 
numpy) - self._histogramssets = default_backend.astensor(histogramssets) + self._histogramssets = pyhf.default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets self._deltas_up = self._histogramssets[:, :, 2] - self._histogramssets[:, :, 1] self._deltas_dn = self._histogramssets[:, :, 1] - self._histogramssets[:, :, 0] - self._broadcast_helper = default_backend.ones( - default_backend.shape(self._deltas_up) + self._broadcast_helper = pyhf.default_backend.ones( + pyhf.default_backend.shape(self._deltas_up) ) self._precompute() if subscribe: diff --git a/src/pyhf/modifiers/shapefactor.py b/src/pyhf/modifiers/shapefactor.py index 00b34121f9..10207575b3 100644 --- a/src/pyhf/modifiers/shapefactor.py +++ b/src/pyhf/modifiers/shapefactor.py @@ -145,7 +145,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[j for c in pdfconfig.channels for j in range(pdfconfig.channel_nbins[c])]] ] - self._access_field = default_backend.tile( + self._access_field = pyhf.default_backend.tile( global_concatenated_bin_indices, (len(shapefactor_mods), self.batch_size or 1, 1), ) diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py index 5fa3f608f9..dbb294db45 100644 --- a/src/pyhf/modifiers/shapesys.py +++ b/src/pyhf/modifiers/shapesys.py @@ -86,7 +86,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] for m in keys ] - self.__shapesys_info = default_backend.astensor( + self.__shapesys_info = pyhf.default_backend.astensor( [ [ [ @@ -105,7 +105,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[j for c in pdfconfig.channels for j in range(pdfconfig.channel_nbins[c])]] ] - self._access_field = default_backend.tile( + self._access_field = pyhf.default_backend.tile( global_concatenated_bin_indices, (len(self._shapesys_mods), self.batch_size or 1, 1), ) @@ -126,14 +126,14 @@ def _reindex_access_field(self, pdfconfig): singular_sample_index = [ idx for idx, syst in enumerate( - default_backend.astensor(self._shapesys_mask)[syst_index, :, 0] + pyhf.default_backend.astensor(self._shapesys_mask)[syst_index, :, 0] ) if any(syst) ][-1] for batch_index, batch_access in enumerate(syst_access): selection = self.param_viewer.index_selection[syst_index][batch_index] - access_field_for_syst_and_batch = default_backend.zeros( + access_field_for_syst_and_batch = pyhf.default_backend.zeros( len(batch_access) ) @@ -167,18 +167,21 @@ def finalize(self, pdfconfig): # most one sample # sample_uncert_info: ([mask, nominal rate, uncertainty], bin) sample_uncert_info = mod_uncert_info[ - default_backend.astensor( - default_backend.sum(mod_uncert_info[:, 0] > 0, axis=1), dtype='bool' + pyhf.default_backend.astensor( + pyhf.default_backend.sum(mod_uncert_info[:, 0] > 0, axis=1), + dtype='bool', ) ][0] # bin_mask: ([mask], bin) - bin_mask = default_backend.astensor(sample_uncert_info[0], dtype='bool') + bin_mask = pyhf.default_backend.astensor( + sample_uncert_info[0], dtype='bool' + ) # nom_unc: ([nominal, uncertainty], bin) nom_unc = sample_uncert_info[1:] # compute gamma**2 and sigma**2 - nom_unc_sq = default_backend.power(nom_unc, 2) + nom_unc_sq = pyhf.default_backend.power(nom_unc, 2) # when the nominal rate = 0 OR uncertainty = 0, set = 1 nom_unc_sq[nom_unc_sq == 0] = 1 # divide (gamma**2 / sigma**2) and mask to set factors for only the 
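# A quick worked example (hypothetical numbers) of the gamma**2 / sigma**2
# factor computed in this hunk, for one bin with nominal rate 50 and absolute
# uncertainty 5, i.e. a 10% relative uncertainty:
nominal, uncertainty = 50.0, 5.0
factor = nominal**2 / uncertainty**2  # 2500 / 25 = 100.0
# the auxiliary Poisson constraint then carries 100 expected counts, giving
# the intended 1 / sqrt(100) = 10% constraint width for this shapesys bin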
@@ -186,8 +189,8 @@ def finalize(self, pdfconfig): factors = (nom_unc_sq[0] / nom_unc_sq[1])[bin_mask] assert len(factors) == pdfconfig.param_set(pname).n_parameters - pdfconfig.param_set(pname).factors = default_backend.tolist(factors) - pdfconfig.param_set(pname).auxdata = default_backend.tolist(factors) + pdfconfig.param_set(pname).factors = pyhf.default_backend.tolist(factors) + pdfconfig.param_set(pname).auxdata = pyhf.default_backend.tolist(factors) def apply(self, pars): """ diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index 51518acc9e..cea63d7529 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -78,7 +78,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] for m in keys ] - self.__staterror_uncrt = default_backend.astensor( + self.__staterror_uncrt = pyhf.default_backend.astensor( [ [ [ @@ -96,7 +96,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[j for c in pdfconfig.channels for j in range(pdfconfig.channel_nbins[c])]] ] - self._access_field = default_backend.tile( + self._access_field = pyhf.default_backend.tile( global_concatenated_bin_indices, (len(self._staterr_mods), self.batch_size or 1, 1), ) @@ -125,35 +125,37 @@ def _precompute(self): self.staterror_default = tensorlib.ones(tensorlib.shape(self.staterror_mask)) def finalize(self, pdfconfig): - staterror_mask = default_backend.astensor(self._staterror_mask) + staterror_mask = pyhf.default_backend.astensor(self._staterror_mask) for this_mask, uncert_this_mod, mod in zip( staterror_mask, self.__staterror_uncrt, self._staterr_mods ): - active_nominals = default_backend.where( + active_nominals = pyhf.default_backend.where( this_mask[:, 0, :], uncert_this_mod[:, 1, :], - default_backend.zeros(uncert_this_mod[:, 1, :].shape), + pyhf.default_backend.zeros(uncert_this_mod[:, 1, :].shape), ) - summed_nominals = default_backend.sum(active_nominals, axis=0) + summed_nominals = pyhf.default_backend.sum(active_nominals, axis=0) # the below tries to filter cases in which this modifier is not # used by checking non zeroness.. 
should probably use mask - numerator = default_backend.where( + numerator = pyhf.default_backend.where( uncert_this_mod[:, 1, :] > 0, uncert_this_mod[:, 0, :], - default_backend.zeros(uncert_this_mod[:, 1, :].shape), + pyhf.default_backend.zeros(uncert_this_mod[:, 1, :].shape), ) - denominator = default_backend.where( + denominator = pyhf.default_backend.where( summed_nominals > 0, summed_nominals, - default_backend.ones(uncert_this_mod[:, 1, :].shape), + pyhf.default_backend.ones(uncert_this_mod[:, 1, :].shape), ) relerrs = numerator / denominator - sigmas = default_backend.sqrt( - default_backend.sum(default_backend.power(relerrs, 2), axis=0) + sigmas = pyhf.default_backend.sqrt( + pyhf.default_backend.sum(pyhf.default_backend.power(relerrs, 2), axis=0) ) assert len(sigmas[sigmas > 0]) == pdfconfig.param_set(mod).n_parameters - pdfconfig.param_set(mod).sigmas = default_backend.tolist(sigmas[sigmas > 0]) + pdfconfig.param_set(mod).sigmas = pyhf.default_backend.tolist( + sigmas[sigmas > 0] + ) def apply(self, pars): if not self.param_viewer.index_selection: diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py index 5ec0df61a4..ed76624f17 100644 --- a/src/pyhf/tensor/manager.py +++ b/src/pyhf/tensor/manager.py @@ -1,6 +1,7 @@ import sys from pyhf.tensor import BackendRetriever +from pyhf import exceptions this = sys.modules[__name__] this.state = { @@ -93,7 +94,7 @@ def set_backend(backend, custom_optimizer=None, precision=None, default=False): **backend_kwargs ) except TypeError: - raise InvalidBackend( + raise exceptions.InvalidBackend( f"The backend provided is not supported: {backend:s}. Select from one of the supported backends: numpy, tensorflow, pytorch" ) @@ -104,7 +105,7 @@ def set_backend(backend, custom_optimizer=None, precision=None, default=False): f"'{backend.name:s}' is not a valid name attribute for backend type {type(backend)}\n Custom backends must have names unique from supported backends" ) if backend.precision not in _supported_precisions: - raise Unsupported( + raise exceptions.Unsupported( f"The backend precision provided is not supported: {backend.precision:s}. Select from one of the supported precisions: {', '.join([str(v) for v in _supported_precisions])}" ) # If "precision" arg passed, it should always win @@ -125,7 +126,7 @@ def set_backend(backend, custom_optimizer=None, precision=None, default=False): OptimizerRetriever, f"{custom_optimizer.lower()}_optimizer" )() except TypeError: - raise InvalidOptimizer( + raise exceptions.InvalidOptimizer( f"The optimizer provided is not supported: {custom_optimizer}. 
Select from one of the supported optimizers: scipy, minuit" ) else: From c531b5642572ee5770ab0ebeb4522857f004a680 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 09:09:59 -0700 Subject: [PATCH 05/26] dropped an import, whoops --- src/pyhf/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index 79b88fe763..3f653f65b8 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -1,4 +1,5 @@ from pyhf.tensor import BackendRetriever as tensor +from pyhf.optimize import OptimizerRetriever as optimize from pyhf.tensor.manager import get_backend from pyhf.tensor.manager import set_backend from pyhf._version import version as __version__ From a70b8c2a804cc7ca8ac52391d9a49da95cc1752c Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 09:11:57 -0700 Subject: [PATCH 06/26] fix up optimize --- src/pyhf/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index 3f653f65b8..b193f4c141 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -23,6 +23,7 @@ "infer", "interpolators", "modifiers", + "optimize", "optimizer", "parameters", "patchset", From 2f484f38c44e3eafa2dc3990c1d0ffc84acdccf1 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 09:32:57 -0700 Subject: [PATCH 07/26] raise attributeerror --- src/pyhf/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index b193f4c141..c8c496de71 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -49,3 +49,4 @@ def __getattr__(name): return get_backend(default=False)[1] if name == 'default_backend': return get_backend(default=True)[0] + raise AttributeError From ec2e8f070628f51e04dd74271e435e36757e30c2 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 09:40:17 -0700 Subject: [PATCH 08/26] drop from docs/api --- docs/api.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/api.rst b/docs/api.rst index 048e73728e..4ad8519cd5 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -10,8 +10,6 @@ Top-Level :toctree: _generated/ :nosignatures: - default_backend - default_optimizer tensorlib optimizer get_backend From 08dfa02eac4c59f0fbd215c301326de794874753 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 15 Oct 2021 10:10:28 -0700 Subject: [PATCH 09/26] minor fixes --- src/pyhf/tensor/manager.py | 1 + tests/conftest.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py index ed76624f17..cabb9a6f7c 100644 --- a/src/pyhf/tensor/manager.py +++ b/src/pyhf/tensor/manager.py @@ -16,6 +16,7 @@ def get_backend(default=False): Example: >>> import pyhf + >>> pyhf.set_backend("numpy") >>> backend, optimizer = pyhf.get_backend() >>> backend diff --git a/tests/conftest.py b/tests/conftest.py index 959756a5a9..1312645a75 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -69,9 +69,9 @@ def reset_backend(): """ This fixture is automatically run to reset the backend before and after a test function runs. 
""" - pyhf.set_backend(pyhf.default_backend) + pyhf.set_backend('numpy') yield reset_backend - pyhf.set_backend(pyhf.default_backend) + pyhf.set_backend('numpy') @pytest.fixture( From bb7b26fe5ef852fb5b1f18d2f390eec1f4cd8fb8 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Mon, 18 Oct 2021 09:56:10 -0700 Subject: [PATCH 10/26] tests --- tests/conftest.py | 4 +-- tests/test_backends.py | 71 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 2 deletions(-) create mode 100644 tests/test_backends.py diff --git a/tests/conftest.py b/tests/conftest.py index 1312645a75..61a965e7c6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -69,9 +69,9 @@ def reset_backend(): """ This fixture is automatically run to reset the backend before and after a test function runs. """ - pyhf.set_backend('numpy') + pyhf.set_backend('numpy', default=True) yield reset_backend - pyhf.set_backend('numpy') + pyhf.set_backend('numpy', default=True) @pytest.fixture( diff --git a/tests/test_backends.py b/tests/test_backends.py new file mode 100644 index 0000000000..cf4240f0d0 --- /dev/null +++ b/tests/test_backends.py @@ -0,0 +1,71 @@ +import pyhf +import jax +import pytest + + +def test_default_backend(): + pyhf.set_backend("jax", default=True) + + assert pyhf.default_backend.name == 'jax' + assert pyhf.tensorlib.name == 'jax' + + +def test_nondefault_backend(): + pyhf.set_backend("jax", default=False) + + assert pyhf.default_backend.name == 'numpy' + assert pyhf.tensorlib.name == 'jax' + + +@pytest.mark.parametrize('jitted', (False, True)) +def test_diffable_backend(jitted): + pyhf.set_backend("jax", default=True) + + def example_op(x): + y = pyhf.default_backend.astensor(x) + return 2 * y + + if jitted: + assert jax.jacrev(jax.jit(example_op))([1.0]) == [2.0] + else: + assert jax.jacrev(example_op)([1.0]) == [2.0] + + def example_op2(x): + y = pyhf.default_backend.power(x, 2) + z = pyhf.tensorlib.sum(y) + return z + + if jitted: + assert jax.jacrev(jax.jit(example_op2))( + pyhf.tensorlib.astensor([2.0, 3.0]) + ).tolist() == [ + 4.0, + 6.0, + ] + else: + assert jax.jacrev(example_op2)( + pyhf.tensorlib.astensor([2.0, 3.0]) + ).tolist() == [ + 4.0, + 6.0, + ] + + +def test_diffable_backend_failure(): + pyhf.set_backend("numpy", default=True) + pyhf.set_backend("jax") + + def example_op(x): + y = pyhf.default_backend.astensor(x) + return 2 * y + + with pytest.raises(jax._src.errors.TracerArrayConversionError): + jax.jacrev(example_op)([1.0]) + + def example_op2(x): + y = pyhf.default_backend.power(x, 2) + z = pyhf.tensorlib.sum(y) + return z + + with pytest.raises(jax._src.errors.TracerArrayConversionError): + jax.jacrev(example_op2)(pyhf.tensorlib.astensor([2.0, 3.0])) From 06060928f752cba4befef5d39d84c140117ce94a Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Mon, 18 Oct 2021 13:19:11 -0700 Subject: [PATCH 11/26] handle other error --- tests/test_backends.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_backends.py b/tests/test_backends.py index cf4240f0d0..d2798d50bf 100644 --- a/tests/test_backends.py +++ b/tests/test_backends.py @@ -59,7 +59,9 @@ def example_op(x): y = pyhf.default_backend.astensor(x) return 2 * y - with pytest.raises(jax._src.errors.TracerArrayConversionError): + with pytest.raises( + (jax._src.errors.TracerArrayConversionError, jax.errors.ConcretizationTypeError) + ): jax.jacrev(example_op)([1.0]) def example_op2(x): From 5455fa316eaccefad45b4e4663fa32e1cc52b78e Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Mon, 18 Oct 
2021 15:48:15 -0700 Subject: [PATCH 12/26] fix up --- src/pyhf/modifiers/histosys.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/pyhf/modifiers/histosys.py b/src/pyhf/modifiers/histosys.py index 0680018a3b..3d50d048c7 100644 --- a/src/pyhf/modifiers/histosys.py +++ b/src/pyhf/modifiers/histosys.py @@ -1,6 +1,8 @@ import logging -from pyhf import get_backend, default_backend, events +import pyhf +from pyhf import events +from pyhf.tensor.manager import get_backend from pyhf import interpolators from pyhf.parameters import ParamViewer @@ -59,16 +61,16 @@ def append(self, key, channel, sample, thismod, defined_samp): def finalize(self): for modifier in self.builder_data.values(): for sample in modifier.values(): - sample["data"]["mask"] = default_backend.concatenate( + sample["data"]["mask"] = pyhf.default_backend.concatenate( sample["data"]["mask"] ) - sample["data"]["lo_data"] = default_backend.concatenate( + sample["data"]["lo_data"] = pyhf.default_backend.concatenate( sample["data"]["lo_data"] ) - sample["data"]["hi_data"] = default_backend.concatenate( + sample["data"]["hi_data"] = pyhf.default_backend.concatenate( sample["data"]["hi_data"] ) - sample["data"]["nom_data"] = default_backend.concatenate( + sample["data"]["nom_data"] = pyhf.default_backend.concatenate( sample["data"]["nom_data"] ) return self.builder_data From 66ab686b9ba3d132ba59dda0a89ec98e2058e5a3 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Tue, 19 Oct 2021 06:11:22 -0700 Subject: [PATCH 13/26] fix up --- src/pyhf/modifiers/shapesys.py | 6 +++--- src/pyhf/modifiers/staterror.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py index 67f65b0969..5675f1ea69 100644 --- a/src/pyhf/modifiers/shapesys.py +++ b/src/pyhf/modifiers/shapesys.py @@ -66,13 +66,13 @@ def append(self, key, channel, sample, thismod, defined_samp): def finalize(self): for modifier in self.builder_data.values(): for sample in modifier.values(): - sample["data"]["mask"] = default_backend.concatenate( + sample["data"]["mask"] = pyhf.default_backend.concatenate( sample["data"]["mask"] ) - sample["data"]["uncrt"] = default_backend.concatenate( + sample["data"]["uncrt"] = pyhf.default_backend.concatenate( sample["data"]["uncrt"] ) - sample["data"]["nom_data"] = default_backend.concatenate( + sample["data"]["nom_data"] = pyhf.default_backend.concatenate( sample["data"]["nom_data"] ) return self.builder_data diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index c114a71ceb..7190df5e98 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -57,13 +57,13 @@ def append(self, key, channel, sample, thismod, defined_samp): def finalize(self): for modifier in self.builder_data.values(): for sample in modifier.values(): - sample["data"]["mask"] = default_backend.concatenate( + sample["data"]["mask"] = pyhf.default_backend.concatenate( sample["data"]["mask"] ) - sample["data"]["uncrt"] = default_backend.concatenate( + sample["data"]["uncrt"] = pyhf.default_backend.concatenate( sample["data"]["uncrt"] ) - sample["data"]["nom_data"] = default_backend.concatenate( + sample["data"]["nom_data"] = pyhf.default_backend.concatenate( sample["data"]["nom_data"] ) return self.builder_data From 7d759f64a553401c757bce329107899d4a19f0d3 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Tue, 19 Oct 2021 07:03:34 -0700 Subject: [PATCH 14/26] fix up more --- tests/test_backends.py | 6 +++++- 1 
file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/test_backends.py b/tests/test_backends.py index d2798d50bf..e050633c3a 100644 --- a/tests/test_backends.py +++ b/tests/test_backends.py @@ -60,7 +60,11 @@ def example_op(x): return 2 * y with pytest.raises( - (jax._src.errors.TracerArrayConversionError, jax.errors.ConcretizationTypeError) + ( + ValueError, + jax._src.errors.TracerArrayConversionError, + jax.errors.ConcretizationTypeError, + ) ): jax.jacrev(example_op)([1.0]) From 97dcde70f7ff170db8c0d10cd10e63183ef19043 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Tue, 19 Oct 2021 07:17:43 -0700 Subject: [PATCH 15/26] fix last test --- tests/test_public_api_repr.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_public_api_repr.py b/tests/test_public_api_repr.py index efa3857355..ccb4738d63 100644 --- a/tests/test_public_api_repr.py +++ b/tests/test_public_api_repr.py @@ -13,6 +13,7 @@ def test_top_level_public_api(): "Workspace", "__version__", "compat", + "default_backend", "exceptions", "get_backend", "infer", From 91c54190533cbb9d54433c7d3371e5c7ee40030a Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Tue, 19 Oct 2021 07:36:31 -0700 Subject: [PATCH 16/26] last last last fix --- tests/test_public_api_repr.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_public_api_repr.py b/tests/test_public_api_repr.py index ccb4738d63..e59370e476 100644 --- a/tests/test_public_api_repr.py +++ b/tests/test_public_api_repr.py @@ -19,6 +19,7 @@ def test_top_level_public_api(): "infer", "interpolators", "modifiers", + "optimize", "optimizer", "parameters", "patchset", From 7eeeec54a227b8ea42e82dda149d6b7773890966 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Tue, 19 Oct 2021 14:08:30 -0700 Subject: [PATCH 17/26] make sure we test all backgrounds with default backend --- tests/test_simplemodels.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_simplemodels.py b/tests/test_simplemodels.py index ac3842cca2..5b8eded64e 100644 --- a/tests/test_simplemodels.py +++ b/tests/test_simplemodels.py @@ -3,7 +3,7 @@ import pyhf -def test_correlated_background(): +def test_correlated_background(backend): model = pyhf.simplemodels.correlated_background( signal=[12.0, 11.0], bkg=[50.0, 52.0], @@ -17,7 +17,7 @@ def test_correlated_background(): assert model.config.suggested_init() == [0.0, 1.0] -def test_uncorrelated_background(): +def test_uncorrelated_background(backend): model = pyhf.simplemodels.uncorrelated_background( signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0] ) From 8c2ee8e7205ffdfa279fab9ff8111a027655d786 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Tue, 19 Oct 2021 14:12:58 -0700 Subject: [PATCH 18/26] add tests to handle the use case --- tests/test_simplemodels.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tests/test_simplemodels.py b/tests/test_simplemodels.py index 5b8eded64e..3b5056baca 100644 --- a/tests/test_simplemodels.py +++ b/tests/test_simplemodels.py @@ -1,8 +1,15 @@ import warnings +import pytest import pyhf +@pytest.fixture(scope='function') +def default_backend(backend): + pyhf.set_backend(*backend, default=True) + yield backend + + def test_correlated_background(backend): model = pyhf.simplemodels.correlated_background( signal=[12.0, 11.0], @@ -32,6 +39,35 @@ def test_uncorrelated_background(backend): assert model.config.suggested_init() == [1.0, 1.0, 1.0] +def test_correlated_background_default_backend(default_backend): + model = 
pyhf.simplemodels.correlated_background( + signal=[12.0, 11.0], + bkg=[50.0, 52.0], + bkg_up=[45.0, 57.0], + bkg_down=[55.0, 47.0], + ) + assert model.config.channels == ["single_channel"] + assert model.config.samples == ["background", "signal"] + assert model.config.par_order == ["correlated_bkg_uncertainty", "mu"] + assert model.config.par_names() == ['correlated_bkg_uncertainty', "mu"] + assert model.config.suggested_init() == [0.0, 1.0] + + +def test_uncorrelated_background_default_backend(default_backend): + model = pyhf.simplemodels.uncorrelated_background( + signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0] + ) + assert model.config.channels == ["singlechannel"] + assert model.config.samples == ["background", "signal"] + assert model.config.par_order == ["mu", "uncorr_bkguncrt"] + assert model.config.par_names() == [ + 'mu', + 'uncorr_bkguncrt[0]', + 'uncorr_bkguncrt[1]', + ] + assert model.config.suggested_init() == [1.0, 1.0, 1.0] + + # TODO: Remove when pyhf.simplemodels.hepdata_like is removed in pyhf v0.7.0 def test_deprecated_apis(): with warnings.catch_warnings(record=True) as _warning: From 8bdc25266a14912bd1096f5bb9de1d5ea83681fd Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Tue, 19 Oct 2021 14:29:21 -0700 Subject: [PATCH 19/26] xfail tests --- tests/test_simplemodels.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/test_simplemodels.py b/tests/test_simplemodels.py index 3b5056baca..1ad338d7b7 100644 --- a/tests/test_simplemodels.py +++ b/tests/test_simplemodels.py @@ -39,6 +39,11 @@ def test_uncorrelated_background(backend): assert model.config.suggested_init() == [1.0, 1.0, 1.0] +# See #1654 +@pytest.mark.fail_pytorch +@pytest.mark.fail_pytorch64 +@pytest.mark.fail_tensorflow +@pytest.mark.fail_jax def test_correlated_background_default_backend(default_backend): model = pyhf.simplemodels.correlated_background( signal=[12.0, 11.0], @@ -53,6 +58,11 @@ def test_correlated_background_default_backend(default_backend): assert model.config.suggested_init() == [0.0, 1.0] +# See #1654 +@pytest.mark.fail_pytorch +@pytest.mark.fail_pytorch64 +@pytest.mark.fail_tensorflow +@pytest.mark.fail_jax def test_uncorrelated_background_default_backend(default_backend): model = pyhf.simplemodels.uncorrelated_background( signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0] From 45415cfaa8039e0aca4b8e24bf5d3cc44f5da0e0 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Wed, 20 Oct 2021 16:24:11 -0700 Subject: [PATCH 20/26] use db=pyhf.db --- src/pyhf/constraints.py | 30 +++++++++++++----------- src/pyhf/interpolators/code0.py | 9 ++++---- src/pyhf/interpolators/code1.py | 13 ++++++----- src/pyhf/interpolators/code2.py | 9 ++++---- src/pyhf/interpolators/code4.py | 32 ++++++++++++++------------ src/pyhf/interpolators/code4p.py | 9 ++++---- src/pyhf/modifiers/histosys.py | 10 ++++---- src/pyhf/modifiers/shapefactor.py | 3 ++- src/pyhf/modifiers/shapesys.py | 34 +++++++++++++++------------ src/pyhf/modifiers/staterror.py | 38 ++++++++++++++++--------------- src/pyhf/parameters/paramsets.py | 6 +++-- src/pyhf/parameters/paramview.py | 8 ++++--- src/pyhf/pdf.py | 12 ++++++---- src/pyhf/tensor/common.py | 12 ++++++---- 14 files changed, 126 insertions(+), 99 deletions(-) diff --git a/src/pyhf/constraints.py b/src/pyhf/constraints.py index a4136f227d..075c5bbe7b 100644 --- a/src/pyhf/constraints.py +++ b/src/pyhf/constraints.py @@ -13,6 +13,8 @@ def __dir__(): class gaussian_constraint_combined: def __init__(self, pdfconfig, batch_size=None): + 
default_backend = pyhf.default_backend + self.batch_size = batch_size # iterate over all constraints order doesn't matter.... @@ -61,18 +63,18 @@ def __init__(self, pdfconfig, batch_size=None): # if this constraint terms is at all used (non-zrto idx selection # start preparing constant tensors if self.param_viewer.index_selection: - self._normal_data = pyhf.default_backend.astensor( - pyhf.default_backend.concatenate(normal_constraint_data), dtype='int' + self._normal_data = default_backend.astensor( + default_backend.concatenate(normal_constraint_data), dtype='int' ) - _normal_sigmas = pyhf.default_backend.concatenate(normal_constraint_sigmas) + _normal_sigmas = default_backend.concatenate(normal_constraint_sigmas) if self.batch_size: - sigmas = pyhf.default_backend.reshape(_normal_sigmas, (1, -1)) - self._sigmas = pyhf.default_backend.tile(sigmas, (self.batch_size, 1)) + sigmas = default_backend.reshape(_normal_sigmas, (1, -1)) + self._sigmas = default_backend.tile(sigmas, (self.batch_size, 1)) else: self._sigmas = _normal_sigmas - access_field = pyhf.default_backend.concatenate( + access_field = default_backend.concatenate( self.param_viewer.index_selection, axis=1 ) self._access_field = access_field @@ -145,6 +147,8 @@ def logpdf(self, auxdata, pars): class poisson_constraint_combined: def __init__(self, pdfconfig, batch_size=None): + default_backend = pyhf.default_backend + self.batch_size = batch_size # iterate over all constraints order doesn't matter.... @@ -190,20 +194,20 @@ def __init__(self, pdfconfig, batch_size=None): self._access_field = None self._batched_factors = None if self.param_viewer.index_selection: - self._poisson_data = pyhf.default_backend.astensor( - pyhf.default_backend.concatenate(poisson_constraint_data), dtype='int' + self._poisson_data = default_backend.astensor( + default_backend.concatenate(poisson_constraint_data), dtype='int' ) - _poisson_rate_fac = pyhf.default_backend.astensor( - pyhf.default_backend.concatenate(poisson_constraint_rate_factors), + _poisson_rate_fac = default_backend.astensor( + default_backend.concatenate(poisson_constraint_rate_factors), dtype='float', ) - factors = pyhf.default_backend.reshape(_poisson_rate_fac, (1, -1)) - self._batched_factors = pyhf.default_backend.tile( + factors = default_backend.reshape(_poisson_rate_fac, (1, -1)) + self._batched_factors = default_backend.tile( factors, (self.batch_size or 1, 1) ) - access_field = pyhf.default_backend.concatenate( + access_field = default_backend.concatenate( self.param_viewer.index_selection, axis=1 ) self._access_field = access_field diff --git a/src/pyhf/interpolators/code0.py b/src/pyhf/interpolators/code0.py index ad3333eb31..ff972d506b 100644 --- a/src/pyhf/interpolators/code0.py +++ b/src/pyhf/interpolators/code0.py @@ -26,15 +26,16 @@ class code0: def __init__(self, histogramssets, subscribe=True): """Piecewise-linear Interpolation.""" - # nb: this should never be a tensor, store in default backend (e.g. 
numpy) - self._histogramssets = pyhf.default_backend.astensor(histogramssets) + default_backend = pyhf.default_backend + + self._histogramssets = default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets self._deltas_up = self._histogramssets[:, :, 2] - self._histogramssets[:, :, 1] self._deltas_dn = self._histogramssets[:, :, 1] - self._histogramssets[:, :, 0] - self._broadcast_helper = pyhf.default_backend.ones( - pyhf.default_backend.shape(self._deltas_up) + self._broadcast_helper = default_backend.ones( + default_backend.shape(self._deltas_up) ) self._precompute() if subscribe: diff --git a/src/pyhf/interpolators/code1.py b/src/pyhf/interpolators/code1.py index d0245ccc09..f5bbe71ff0 100644 --- a/src/pyhf/interpolators/code1.py +++ b/src/pyhf/interpolators/code1.py @@ -27,19 +27,20 @@ class code1: def __init__(self, histogramssets, subscribe=True): """Piecewise-Exponential Interpolation.""" - # nb: this should never be a tensor, store in default backend (e.g. numpy) - self._histogramssets = pyhf.default_backend.astensor(histogramssets) + default_backend = pyhf.default_backend + + self._histogramssets = default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets - self._deltas_up = pyhf.default_backend.divide( + self._deltas_up = default_backend.divide( self._histogramssets[:, :, 2], self._histogramssets[:, :, 1] ) - self._deltas_dn = pyhf.default_backend.divide( + self._deltas_dn = default_backend.divide( self._histogramssets[:, :, 0], self._histogramssets[:, :, 1] ) - self._broadcast_helper = pyhf.default_backend.ones( - pyhf.default_backend.shape(self._deltas_up) + self._broadcast_helper = default_backend.ones( + default_backend.shape(self._deltas_up) ) self._precompute() diff --git a/src/pyhf/interpolators/code2.py b/src/pyhf/interpolators/code2.py index 46a08c5757..ed305c44da 100644 --- a/src/pyhf/interpolators/code2.py +++ b/src/pyhf/interpolators/code2.py @@ -30,8 +30,9 @@ class code2: def __init__(self, histogramssets, subscribe=True): """Quadratic Interpolation.""" - # nb: this should never be a tensor, store in default backend (e.g. 
numpy) - self._histogramssets = pyhf.default_backend.astensor(histogramssets) + default_backend = pyhf.default_backend + + self._histogramssets = default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets @@ -42,9 +43,7 @@ def __init__(self, histogramssets, subscribe=True): self._b = 0.5 * (self._histogramssets[:, :, 2] - self._histogramssets[:, :, 0]) self._b_plus_2a = self._b + 2 * self._a self._b_minus_2a = self._b - 2 * self._a - self._broadcast_helper = pyhf.default_backend.ones( - pyhf.default_backend.shape(self._a) - ) + self._broadcast_helper = default_backend.ones(default_backend.shape(self._a)) self._precompute() if subscribe: events.subscribe('tensorlib_changed')(self._precompute) diff --git a/src/pyhf/interpolators/code4.py b/src/pyhf/interpolators/code4.py index 32f95062e4..a120bdf295 100644 --- a/src/pyhf/interpolators/code4.py +++ b/src/pyhf/interpolators/code4.py @@ -33,30 +33,32 @@ class code4: def __init__(self, histogramssets, subscribe=True, alpha0=1): """Polynomial Interpolation.""" + default_backend = pyhf.default_backend + # alpha0 is assumed to be positive and non-zero. If alpha0 == 0, then # we cannot calculate the coefficients (e.g. determinant == 0) assert alpha0 > 0 self.__alpha0 = alpha0 - # nb: this should never be a tensor, store in default backend (e.g. numpy) - self._histogramssets = pyhf.default_backend.astensor(histogramssets) + + self._histogramssets = default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets - self._deltas_up = pyhf.default_backend.divide( + self._deltas_up = default_backend.divide( self._histogramssets[:, :, 2], self._histogramssets[:, :, 1] ) - self._deltas_dn = pyhf.default_backend.divide( + self._deltas_dn = default_backend.divide( self._histogramssets[:, :, 0], self._histogramssets[:, :, 1] ) - self._broadcast_helper = pyhf.default_backend.ones( - pyhf.default_backend.shape(self._deltas_up) + self._broadcast_helper = default_backend.ones( + default_backend.shape(self._deltas_up) ) self._alpha0 = self._broadcast_helper * self.__alpha0 - deltas_up_alpha0 = pyhf.default_backend.power(self._deltas_up, self._alpha0) - deltas_dn_alpha0 = pyhf.default_backend.power(self._deltas_dn, self._alpha0) + deltas_up_alpha0 = default_backend.power(self._deltas_up, self._alpha0) + deltas_dn_alpha0 = default_backend.power(self._deltas_dn, self._alpha0) # x = A^{-1} b - A_inverse = pyhf.default_backend.astensor( + A_inverse = default_backend.astensor( [ [ 15.0 / (16 * alpha0), @@ -108,19 +110,19 @@ def __init__(self, histogramssets, subscribe=True, alpha0=1): ], ] ) - b = pyhf.default_backend.stack( + b = default_backend.stack( [ deltas_up_alpha0 - self._broadcast_helper, deltas_dn_alpha0 - self._broadcast_helper, - pyhf.default_backend.log(self._deltas_up) * deltas_up_alpha0, - -pyhf.default_backend.log(self._deltas_dn) * deltas_dn_alpha0, - pyhf.default_backend.power(pyhf.default_backend.log(self._deltas_up), 2) + default_backend.log(self._deltas_up) * deltas_up_alpha0, + -default_backend.log(self._deltas_dn) * deltas_dn_alpha0, + default_backend.power(default_backend.log(self._deltas_up), 2) * deltas_up_alpha0, - pyhf.default_backend.power(pyhf.default_backend.log(self._deltas_dn), 2) + default_backend.power(default_backend.log(self._deltas_dn), 2) * deltas_dn_alpha0, ] ) - 
self._coefficients = pyhf.default_backend.einsum( + self._coefficients = default_backend.einsum( 'rc,shb,cshb->rshb', A_inverse, self._broadcast_helper, b ) diff --git a/src/pyhf/interpolators/code4p.py b/src/pyhf/interpolators/code4p.py index a25ffb14fb..ec8b63afa7 100644 --- a/src/pyhf/interpolators/code4p.py +++ b/src/pyhf/interpolators/code4p.py @@ -19,15 +19,16 @@ class code4p: def __init__(self, histogramssets, subscribe=True): """Piecewise-Linear + Polynomial Interpolation.""" - # nb: this should never be a tensor, store in default backend (e.g. numpy) - self._histogramssets = pyhf.default_backend.astensor(histogramssets) + default_backend = pyhf.default_backend + + self._histogramssets = default_backend.astensor(histogramssets) # initial shape will be (nsysts, 1) self.alphasets_shape = (self._histogramssets.shape[0], 1) # precompute terms that only depend on the histogramssets self._deltas_up = self._histogramssets[:, :, 2] - self._histogramssets[:, :, 1] self._deltas_dn = self._histogramssets[:, :, 1] - self._histogramssets[:, :, 0] - self._broadcast_helper = pyhf.default_backend.ones( - pyhf.default_backend.shape(self._deltas_up) + self._broadcast_helper = default_backend.ones( + default_backend.shape(self._deltas_up) ) self._precompute() if subscribe: diff --git a/src/pyhf/modifiers/histosys.py b/src/pyhf/modifiers/histosys.py index 3d50d048c7..d23e6c1ff3 100644 --- a/src/pyhf/modifiers/histosys.py +++ b/src/pyhf/modifiers/histosys.py @@ -59,18 +59,20 @@ def append(self, key, channel, sample, thismod, defined_samp): ) def finalize(self): + default_backend = pyhf.default_backend + for modifier in self.builder_data.values(): for sample in modifier.values(): - sample["data"]["mask"] = pyhf.default_backend.concatenate( + sample["data"]["mask"] = default_backend.concatenate( sample["data"]["mask"] ) - sample["data"]["lo_data"] = pyhf.default_backend.concatenate( + sample["data"]["lo_data"] = default_backend.concatenate( sample["data"]["lo_data"] ) - sample["data"]["hi_data"] = pyhf.default_backend.concatenate( + sample["data"]["hi_data"] = default_backend.concatenate( sample["data"]["hi_data"] ) - sample["data"]["nom_data"] = pyhf.default_backend.concatenate( + sample["data"]["nom_data"] = default_backend.concatenate( sample["data"]["nom_data"] ) return self.builder_data diff --git a/src/pyhf/modifiers/shapefactor.py b/src/pyhf/modifiers/shapefactor.py index 10207575b3..6c65c2e8c9 100644 --- a/src/pyhf/modifiers/shapefactor.py +++ b/src/pyhf/modifiers/shapefactor.py @@ -126,6 +126,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): and at that point can be used to compute the effect of :class:`~pyhf.modifiers.shapefactor`. 
""" + default_backend = pyhf.default_backend self.batch_size = batch_size keys = [f'{mtype}/{m}' for m, mtype in modifiers] @@ -145,7 +146,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[j for c in pdfconfig.channels for j in range(pdfconfig.channel_nbins[c])]] ] - self._access_field = pyhf.default_backend.tile( + self._access_field = default_backend.tile( global_concatenated_bin_indices, (len(shapefactor_mods), self.batch_size or 1, 1), ) diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py index 5675f1ea69..0ba9c2f26a 100644 --- a/src/pyhf/modifiers/shapesys.py +++ b/src/pyhf/modifiers/shapesys.py @@ -64,15 +64,17 @@ def append(self, key, channel, sample, thismod, defined_samp): ) def finalize(self): + default_backend = pyhf.default_backend + for modifier in self.builder_data.values(): for sample in modifier.values(): - sample["data"]["mask"] = pyhf.default_backend.concatenate( + sample["data"]["mask"] = default_backend.concatenate( sample["data"]["mask"] ) - sample["data"]["uncrt"] = pyhf.default_backend.concatenate( + sample["data"]["uncrt"] = default_backend.concatenate( sample["data"]["uncrt"] ) - sample["data"]["nom_data"] = pyhf.default_backend.concatenate( + sample["data"]["nom_data"] = default_backend.concatenate( sample["data"]["nom_data"] ) return self.builder_data @@ -83,6 +85,7 @@ class shapesys_combined: op_code = 'multiplication' def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): + default_backend = pyhf.default_backend self.batch_size = batch_size keys = [f'{mtype}/{m}' for m, mtype in modifiers] @@ -97,7 +100,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] for m in keys ] - self.__shapesys_info = pyhf.default_backend.astensor( + self.__shapesys_info = default_backend.astensor( [ [ [ @@ -116,7 +119,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[j for c in pdfconfig.channels for j in range(pdfconfig.channel_nbins[c])]] ] - self._access_field = pyhf.default_backend.tile( + self._access_field = default_backend.tile( global_concatenated_bin_indices, (len(self._shapesys_mods), self.batch_size or 1, 1), ) @@ -129,6 +132,8 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): events.subscribe('tensorlib_changed')(self._precompute) def _reindex_access_field(self, pdfconfig): + default_backend = pyhf.default_backend + for syst_index, syst_access in enumerate(self._access_field): if not pdfconfig.param_set(self._shapesys_mods[syst_index]).n_parameters: self._access_field[syst_index] = 0 @@ -137,14 +142,14 @@ def _reindex_access_field(self, pdfconfig): singular_sample_index = [ idx for idx, syst in enumerate( - pyhf.default_backend.astensor(self._shapesys_mask)[syst_index, :, 0] + default_backend.astensor(self._shapesys_mask)[syst_index, :, 0] ) if any(syst) ][-1] for batch_index, batch_access in enumerate(syst_access): selection = self.param_viewer.index_selection[syst_index][batch_index] - access_field_for_syst_and_batch = pyhf.default_backend.zeros( + access_field_for_syst_and_batch = default_backend.zeros( len(batch_access) ) @@ -167,6 +172,7 @@ def _precompute(self): self.shapesys_default = tensorlib.ones(tensorlib.shape(self.shapesys_mask)) def finalize(self, pdfconfig): + default_backend = pyhf.default_backend # self.__shapesys_info: (parameter, sample, [mask, nominal rate, uncertainty], bin) for mod_uncert_info, pname in zip(self.__shapesys_info, 
self._shapesys_mods): # skip cases where given shapesys modifier affects zero samples @@ -178,21 +184,19 @@ def finalize(self, pdfconfig): # most one sample # sample_uncert_info: ([mask, nominal rate, uncertainty], bin) sample_uncert_info = mod_uncert_info[ - pyhf.default_backend.astensor( - pyhf.default_backend.sum(mod_uncert_info[:, 0] > 0, axis=1), + default_backend.astensor( + default_backend.sum(mod_uncert_info[:, 0] > 0, axis=1), dtype='bool', ) ][0] # bin_mask: ([mask], bin) - bin_mask = pyhf.default_backend.astensor( - sample_uncert_info[0], dtype='bool' - ) + bin_mask = default_backend.astensor(sample_uncert_info[0], dtype='bool') # nom_unc: ([nominal, uncertainty], bin) nom_unc = sample_uncert_info[1:] # compute gamma**2 and sigma**2 - nom_unc_sq = pyhf.default_backend.power(nom_unc, 2) + nom_unc_sq = default_backend.power(nom_unc, 2) # when the nominal rate = 0 OR uncertainty = 0, set = 1 nom_unc_sq[nom_unc_sq == 0] = 1 # divide (gamma**2 / sigma**2) and mask to set factors for only the @@ -200,8 +204,8 @@ def finalize(self, pdfconfig): factors = (nom_unc_sq[0] / nom_unc_sq[1])[bin_mask] assert len(factors) == pdfconfig.param_set(pname).n_parameters - pdfconfig.param_set(pname).factors = pyhf.default_backend.tolist(factors) - pdfconfig.param_set(pname).auxdata = pyhf.default_backend.tolist(factors) + pdfconfig.param_set(pname).factors = default_backend.tolist(factors) + pdfconfig.param_set(pname).auxdata = default_backend.tolist(factors) def apply(self, pars): """ diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index 7190df5e98..ed96c87f7c 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -55,15 +55,17 @@ def append(self, key, channel, sample, thismod, defined_samp): ) def finalize(self): + default_backend = pyhf.default_backend + for modifier in self.builder_data.values(): for sample in modifier.values(): - sample["data"]["mask"] = pyhf.default_backend.concatenate( + sample["data"]["mask"] = default_backend.concatenate( sample["data"]["mask"] ) - sample["data"]["uncrt"] = pyhf.default_backend.concatenate( + sample["data"]["uncrt"] = default_backend.concatenate( sample["data"]["uncrt"] ) - sample["data"]["nom_data"] = pyhf.default_backend.concatenate( + sample["data"]["nom_data"] = default_backend.concatenate( sample["data"]["nom_data"] ) return self.builder_data @@ -75,6 +77,7 @@ class staterror_combined: def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): + default_backend = pyhf.default_backend self.batch_size = batch_size keys = [f'{mtype}/{m}' for m, mtype in modifiers] @@ -89,7 +92,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[builder_data[m][s]['data']['mask']] for s in pdfconfig.samples] for m in keys ] - self.__staterror_uncrt = pyhf.default_backend.astensor( + self.__staterror_uncrt = default_backend.astensor( [ [ [ @@ -107,7 +110,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None): [[j for c in pdfconfig.channels for j in range(pdfconfig.channel_nbins[c])]] ] - self._access_field = pyhf.default_backend.tile( + self._access_field = default_backend.tile( global_concatenated_bin_indices, (len(self._staterr_mods), self.batch_size or 1, 1), ) @@ -136,37 +139,36 @@ def _precompute(self): self.staterror_default = tensorlib.ones(tensorlib.shape(self.staterror_mask)) def finalize(self, pdfconfig): - staterror_mask = pyhf.default_backend.astensor(self._staterror_mask) + default_backend = pyhf.default_backend + staterror_mask = 
default_backend.astensor(self._staterror_mask) for this_mask, uncert_this_mod, mod in zip( staterror_mask, self.__staterror_uncrt, self._staterr_mods ): - active_nominals = pyhf.default_backend.where( + active_nominals = default_backend.where( this_mask[:, 0, :], uncert_this_mod[:, 1, :], - pyhf.default_backend.zeros(uncert_this_mod[:, 1, :].shape), + default_backend.zeros(uncert_this_mod[:, 1, :].shape), ) - summed_nominals = pyhf.default_backend.sum(active_nominals, axis=0) + summed_nominals = default_backend.sum(active_nominals, axis=0) # the below tries to filter cases in which this modifier is not # used by checking non zeroness.. should probably use mask - numerator = pyhf.default_backend.where( + numerator = default_backend.where( uncert_this_mod[:, 1, :] > 0, uncert_this_mod[:, 0, :], - pyhf.default_backend.zeros(uncert_this_mod[:, 1, :].shape), + default_backend.zeros(uncert_this_mod[:, 1, :].shape), ) - denominator = pyhf.default_backend.where( + denominator = default_backend.where( summed_nominals > 0, summed_nominals, - pyhf.default_backend.ones(uncert_this_mod[:, 1, :].shape), + default_backend.ones(uncert_this_mod[:, 1, :].shape), ) relerrs = numerator / denominator - sigmas = pyhf.default_backend.sqrt( - pyhf.default_backend.sum(pyhf.default_backend.power(relerrs, 2), axis=0) + sigmas = default_backend.sqrt( + default_backend.sum(default_backend.power(relerrs, 2), axis=0) ) assert len(sigmas[sigmas > 0]) == pdfconfig.param_set(mod).n_parameters - pdfconfig.param_set(mod).sigmas = pyhf.default_backend.tolist( - sigmas[sigmas > 0] - ) + pdfconfig.param_set(mod).sigmas = default_backend.tolist(sigmas[sigmas > 0]) def apply(self, pars): if not self.param_viewer.index_selection: diff --git a/src/pyhf/parameters/paramsets.py b/src/pyhf/parameters/paramsets.py index 532ae8ff09..89fac35e10 100644 --- a/src/pyhf/parameters/paramsets.py +++ b/src/pyhf/parameters/paramsets.py @@ -65,9 +65,11 @@ def __init__(self, **kwargs): self.factors = factors def width(self): + default_backend = pyhf.default_backend + try: - return pyhf.default_backend.sqrt( - 1.0 / pyhf.default_backend.astensor(self.factors) + return default_backend.sqrt( + 1.0 / default_backend.astensor(self.factors) ).tolist() except AttributeError: raise RuntimeError('need to know rate factor to compu') diff --git a/src/pyhf/parameters/paramview.py b/src/pyhf/parameters/paramview.py index f131d656a2..50d5017fbf 100644 --- a/src/pyhf/parameters/paramview.py +++ b/src/pyhf/parameters/paramview.py @@ -52,11 +52,13 @@ class ParamViewer: def __init__(self, shape, par_map, par_selection): + default_backend = pyhf.default_backend + batch_size = shape[0] if len(shape) > 1 else None - fullsize = pyhf.default_backend.product(pyhf.default_backend.astensor(shape)) - flat_indices = pyhf.default_backend.astensor(range(int(fullsize)), dtype='int') - self._all_indices = pyhf.default_backend.reshape(flat_indices, shape) + fullsize = default_backend.product(default_backend.astensor(shape)) + flat_indices = default_backend.astensor(range(int(fullsize)), dtype='int') + self._all_indices = default_backend.reshape(flat_indices, shape) # a tensor viewer that can split and stitch parameters self.allpar_viewer = _tensorviewer_from_parmap(par_map, batch_size) diff --git a/src/pyhf/pdf.py b/src/pyhf/pdf.py index 6a5e0e0782..ab70ede856 100644 --- a/src/pyhf/pdf.py +++ b/src/pyhf/pdf.py @@ -65,13 +65,15 @@ def append(self, channel, sample, defined_samp): self.mega_samples[sample]['nom'].append(nom) def finalize(self): - nominal_rates = 
pyhf.default_backend.astensor( + default_backend = pyhf.default_backend + + nominal_rates = default_backend.astensor( [ - pyhf.default_backend.concatenate(self.mega_samples[sample]['nom']) + default_backend.concatenate(self.mega_samples[sample]['nom']) for sample in self.config.samples ] ) - _nominal_rates = pyhf.default_backend.reshape( + _nominal_rates = default_backend.reshape( nominal_rates, ( 1, # modifier dimension.. nominal_rates is the base @@ -474,13 +476,15 @@ class _MainModel: """Factory class to create pdfs for the main measurement.""" def __init__(self, config, modifiers, nominal_rates, batch_size=None): + default_backend = pyhf.default_backend + self.config = config self._factor_mods = [] self._delta_mods = [] self.batch_size = batch_size - self._nominal_rates = pyhf.default_backend.tile( + self._nominal_rates = default_backend.tile( nominal_rates, (1, 1, self.batch_size or 1, 1) ) diff --git a/src/pyhf/tensor/common.py b/src/pyhf/tensor/common.py index 75257b492d..cc5de609ad 100644 --- a/src/pyhf/tensor/common.py +++ b/src/pyhf/tensor/common.py @@ -15,13 +15,15 @@ def __init__(self, indices, batch_size=None, names=None): # >>> source[target.argsort()] # array([6, 8, 9, 7]) + default_backend = pyhf.default_backend + self.batch_size = batch_size self.names = names self._partition_indices = indices - _concat_indices = pyhf.default_backend.astensor( - pyhf.default_backend.concatenate(self._partition_indices), dtype='int' + _concat_indices = default_backend.astensor( + default_backend.concatenate(self._partition_indices), dtype='int' ) - self._sorted_indices = pyhf.default_backend.tolist(_concat_indices.argsort()) + self._sorted_indices = default_backend.tolist(_concat_indices.argsort()) self._precompute() events.subscribe('tensorlib_changed')(self._precompute) @@ -65,10 +67,10 @@ def split(self, data, selection=None): def _tensorviewer_from_slices(target_slices, names, batch_size): - db = pyhf.default_backend + default_backend = pyhf.default_backend ranges = [] for sl in target_slices: - ranges.append(db.astensor(range(sl.start, sl.stop))) + ranges.append(default_backend.astensor(range(sl.start, sl.stop))) if not target_slices: return None return _TensorViewer(ranges, names=names, batch_size=batch_size) From 7e135e0b176a5e99888d9bef9d905d6996b4321f Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 22 Oct 2021 07:12:53 -0400 Subject: [PATCH 21/26] Update tests/test_simplemodels.py Co-authored-by: Matthew Feickert --- tests/test_simplemodels.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_simplemodels.py b/tests/test_simplemodels.py index 1ad338d7b7..4863ff4920 100644 --- a/tests/test_simplemodels.py +++ b/tests/test_simplemodels.py @@ -39,7 +39,7 @@ def test_uncorrelated_background(backend): assert model.config.suggested_init() == [1.0, 1.0, 1.0] -# See #1654 +# See https://github.com/scikit-hep/pyhf/issues/1654 @pytest.mark.fail_pytorch @pytest.mark.fail_pytorch64 @pytest.mark.fail_tensorflow From a9c5ba18c0300bfa05650f8bc7c603687e6b0639 Mon Sep 17 00:00:00 2001 From: Giordon Stark Date: Fri, 22 Oct 2021 07:13:17 -0400 Subject: [PATCH 22/26] Update tests/test_simplemodels.py Co-authored-by: Matthew Feickert --- tests/test_simplemodels.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_simplemodels.py b/tests/test_simplemodels.py index 4863ff4920..51dfe60c47 100644 --- a/tests/test_simplemodels.py +++ b/tests/test_simplemodels.py @@ -58,7 +58,7 @@ def test_correlated_background_default_backend(default_backend): 
     assert model.config.suggested_init() == [0.0, 1.0]


-# See #1654
+# See https://github.com/scikit-hep/pyhf/issues/1654
 @pytest.mark.fail_pytorch
 @pytest.mark.fail_pytorch64
 @pytest.mark.fail_tensorflow
 @pytest.mark.fail_jax

From 8977db8e2cd7ff4dab199a412020187e3435daab Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Fri, 22 Oct 2021 04:15:49 -0700
Subject: [PATCH 23/26] remove optimize from public api

---
 src/pyhf/__init__.py          | 2 --
 tests/test_public_api_repr.py | 1 -
 2 files changed, 3 deletions(-)

diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py
index c8c496de71..453252e766 100644
--- a/src/pyhf/__init__.py
+++ b/src/pyhf/__init__.py
@@ -1,5 +1,4 @@
 from pyhf.tensor import BackendRetriever as tensor
-from pyhf.optimize import OptimizerRetriever as optimize
 from pyhf.tensor.manager import get_backend
 from pyhf.tensor.manager import set_backend
 from pyhf._version import version as __version__
@@ -23,7 +22,6 @@
     "infer",
     "interpolators",
     "modifiers",
-    "optimize",
     "optimizer",
     "parameters",
     "patchset",
diff --git a/tests/test_public_api_repr.py b/tests/test_public_api_repr.py
index e59370e476..ccb4738d63 100644
--- a/tests/test_public_api_repr.py
+++ b/tests/test_public_api_repr.py
@@ -19,7 +19,6 @@ def test_top_level_public_api():
         "infer",
         "interpolators",
         "modifiers",
-        "optimize",
         "optimizer",
         "parameters",
         "patchset",

From 399665f9505b9c169973b1caecf1fd8346bbf67d Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Fri, 22 Oct 2021 04:18:03 -0700
Subject: [PATCH 24/26] move only events up

---
 src/pyhf/tensor/manager.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py
index cabb9a6f7c..4e161c8566 100644
--- a/src/pyhf/tensor/manager.py
+++ b/src/pyhf/tensor/manager.py
@@ -2,6 +2,7 @@

 from pyhf.tensor import BackendRetriever
 from pyhf import exceptions
+from pyhf import events

 this = sys.modules[__name__]
 this.state = {
@@ -33,7 +34,6 @@ def get_backend(default=False):


 from pyhf.optimize import OptimizerRetriever
-from pyhf import events

 this.state['default'] = (
     BackendRetriever.numpy_backend(),

From 500d92362bcb926255c9264e68f9f81be21f00e7 Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Fri, 22 Oct 2021 04:19:43 -0700
Subject: [PATCH 25/26] move import up

---
 src/pyhf/tensor/manager.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/pyhf/tensor/manager.py b/src/pyhf/tensor/manager.py
index 4e161c8566..7eedd26a36 100644
--- a/src/pyhf/tensor/manager.py
+++ b/src/pyhf/tensor/manager.py
@@ -3,6 +3,7 @@
 from pyhf.tensor import BackendRetriever
 from pyhf import exceptions
 from pyhf import events
+from pyhf.optimize import OptimizerRetriever

 this = sys.modules[__name__]
 this.state = {
@@ -33,8 +34,6 @@ def get_backend(default=False):
     return this.state['default' if default else 'current']


-from pyhf.optimize import OptimizerRetriever
-
 this.state['default'] = (
     BackendRetriever.numpy_backend(),
     OptimizerRetriever.scipy_optimizer(),

From 972435c1b9cb99c423f2ae957e72ad0692f6396d Mon Sep 17 00:00:00 2001
From: Giordon Stark
Date: Fri, 22 Oct 2021 04:35:51 -0700
Subject: [PATCH 26/26] working?
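
The diff below re-adds the top-level import of the optimize subpackage,
aliased as `optimize` with a `# noqa` marker so linters tolerate the
seemingly unused name, while leaving it out of the public-API list trimmed
in PATCH 23. A quick smoke test of the intended behaviour (a sketch, assuming
the API as it stands at this point in the series, and assuming the trimmed
string list in `__init__.py` is the package's `__all__`):

    import pyhf

    # the subpackage is imported and reachable as an attribute again
    assert hasattr(pyhf, "optimize")

    # but it stays out of the curated public API list
    assert "optimize" not in pyhf.__all__
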
--- src/pyhf/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index 453252e766..4ed8fce762 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -1,4 +1,5 @@ from pyhf.tensor import BackendRetriever as tensor +from pyhf.optimize import OptimizerRetriever as optimize # noqa from pyhf.tensor.manager import get_backend from pyhf.tensor.manager import set_backend from pyhf._version import version as __version__
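
Taken together, the series leaves backend state owned by
`pyhf.tensor.manager`, which tracks a "current" pair (used for computation)
and a "default" pair (what model-building code sees as
`pyhf.default_backend`), both initialized at the bottom of
`tensor/manager.py` to the NumPy backend with the SciPy optimizer. A minimal
sketch of the resulting workflow, assuming the signatures shown in the diffs
above (`get_backend(default=False)`, and the `default=True` flag that the
`default_backend` test fixture passes to `set_backend`):

    import pyhf

    # the *current* backend drives tensor computation (fits, inference)
    pyhf.set_backend("numpy", precision="64b")
    tensorlib, optimizer = pyhf.get_backend()

    # the *default* backend is what interpolators, modifiers, and
    # constraints use as pyhf.default_backend during model construction
    pyhf.set_backend("numpy", default=True)
    default_tensorlib, default_optimizer = pyhf.get_backend(default=True)

    assert tensorlib.name == default_tensorlib.name == "numpy"
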