feat: Configurable default backend #1646

Merged · 31 commits merged into master from feat/configurable-default-backend on Oct 22, 2021
Commits (31)
c9d2f25
move get_backend/set_backend out of init, and refactor things slightl…
kratsg Oct 15, 2021
14e332d
fix some codefactors
kratsg Oct 15, 2021
0df8642
use non-globals
kratsg Oct 15, 2021
fe576d9
flake8/lint
kratsg Oct 15, 2021
c531b56
dropped an import, whoops
kratsg Oct 15, 2021
a70b8c2
fix up optimize
kratsg Oct 15, 2021
2f484f3
raise attributeerror
kratsg Oct 15, 2021
ec2e8f0
drop from docs/api
kratsg Oct 15, 2021
08dfa02
minor fixes
kratsg Oct 15, 2021
bb7b26f
tests
kratsg Oct 18, 2021
0606092
handle other error
kratsg Oct 18, 2021
b011275
Merge branch 'master' into feat/configurable-default-backend
kratsg Oct 18, 2021
ce6ec11
Merge branch 'master' into feat/configurable-default-backend
kratsg Oct 18, 2021
5455fa3
fix up
kratsg Oct 18, 2021
66ab686
fix up
kratsg Oct 19, 2021
7d759f6
fix up more
kratsg Oct 19, 2021
97dcde7
fix last test
kratsg Oct 19, 2021
91c5419
last last last fix
kratsg Oct 19, 2021
75a8978
Merge branch 'master' into feat/configurable-default-backend
kratsg Oct 19, 2021
7eeeec5
make sure we test all backgrounds with default backend
kratsg Oct 19, 2021
8c2ee8e
add tests to handle the use case
kratsg Oct 19, 2021
8bdc252
xfail tests
kratsg Oct 19, 2021
45415cf
use db=pyhf.db
kratsg Oct 20, 2021
86c61fb
Merge branch 'master' into feat/configurable-default-backend
matthewfeickert Oct 22, 2021
7e135e0
Update tests/test_simplemodels.py
kratsg Oct 22, 2021
a9c5ba1
Update tests/test_simplemodels.py
kratsg Oct 22, 2021
8977db8
remove optimize from public api
kratsg Oct 22, 2021
399665f
move only events up
kratsg Oct 22, 2021
500d923
move import up
kratsg Oct 22, 2021
f01cf38
Merge branch 'master' into feat/configurable-default-backend
kratsg Oct 22, 2021
972435c
wworking?
kratsg Oct 22, 2021
2 changes: 0 additions & 2 deletions docs/api.rst
@@ -10,8 +10,6 @@ Top-Level
:toctree: _generated/
:nosignatures:

default_backend
default_optimizer
tensorlib
optimizer
get_backend
164 changes: 14 additions & 150 deletions src/pyhf/__init__.py
@@ -1,155 +1,8 @@
from pyhf.tensor import BackendRetriever as tensor
from pyhf.optimize import OptimizerRetriever as optimize
from pyhf.optimize import OptimizerRetriever as optimize # noqa
from pyhf.tensor.manager import get_backend
from pyhf.tensor.manager import set_backend
from pyhf._version import version as __version__
from pyhf.exceptions import InvalidBackend, InvalidOptimizer, Unsupported
from pyhf import events

tensorlib = None
optimizer = None


def get_backend():
"""
Get the current backend and the associated optimizer

Example:
>>> import pyhf
>>> backend, optimizer = pyhf.get_backend()
>>> backend
<pyhf.tensor.numpy_backend.numpy_backend object at 0x...>
>>> optimizer
<pyhf.optimize.scipy_optimizer object at 0x...>

Returns:
backend, optimizer
"""
global tensorlib
global optimizer
return tensorlib, optimizer


tensorlib = tensor.numpy_backend()
default_backend = tensorlib
optimizer = optimize.scipy_optimizer()
default_optimizer = optimizer


@events.register('change_backend')
def set_backend(backend, custom_optimizer=None, precision=None):
"""
Set the backend and the associated optimizer

Example:
>>> import pyhf
>>> pyhf.set_backend("tensorflow")
>>> pyhf.tensorlib.name
'tensorflow'
>>> pyhf.tensorlib.precision
'64b'
>>> pyhf.set_backend(b"pytorch", precision="32b")
>>> pyhf.tensorlib.name
'pytorch'
>>> pyhf.tensorlib.precision
'32b'
>>> pyhf.set_backend(pyhf.tensor.numpy_backend())
>>> pyhf.tensorlib.name
'numpy'
>>> pyhf.tensorlib.precision
'64b'

Args:
backend (:obj:`str` or `pyhf.tensor` backend): One of the supported pyhf backends: NumPy, TensorFlow, PyTorch, and JAX
custom_optimizer (`pyhf.optimize` optimizer): Optional custom optimizer defined by the user
precision (:obj:`str`): Floating point precision to use in the backend: ``64b`` or ``32b``. Default is backend dependent.

Returns:
None
"""
global tensorlib
global optimizer

_supported_precisions = ["32b", "64b"]
backend_kwargs = {}

if isinstance(precision, (str, bytes)):
if isinstance(precision, bytes):
precision = precision.decode("utf-8")
precision = precision.lower()

if isinstance(backend, (str, bytes)):
if isinstance(backend, bytes):
backend = backend.decode("utf-8")
backend = backend.lower()

if precision is not None:
backend_kwargs["precision"] = precision

try:
backend = getattr(tensor, f"{backend:s}_backend")(**backend_kwargs)
except TypeError:
raise InvalidBackend(
f"The backend provided is not supported: {backend:s}. Select from one of the supported backends: numpy, tensorflow, pytorch"
)

_name_supported = getattr(tensor, f"{backend.name:s}_backend")
if _name_supported:
if not isinstance(backend, _name_supported):
raise AttributeError(
f"'{backend.name:s}' is not a valid name attribute for backend type {type(backend)}\n Custom backends must have names unique from supported backends"
)
if backend.precision not in _supported_precisions:
raise Unsupported(
f"The backend precision provided is not supported: {backend.precision:s}. Select from one of the supported precisions: {', '.join([str(v) for v in _supported_precisions])}"
)
# If "precision" arg passed, it should always win
# If no "precision" arg, defer to tensor backend object API if set there
if precision is not None:
if backend.precision != precision:
backend_kwargs["precision"] = precision
backend = getattr(tensor, f"{backend.name:s}_backend")(**backend_kwargs)

# need to determine if the tensorlib changed or the optimizer changed for events
tensorlib_changed = bool(
(backend.name != tensorlib.name) | (backend.precision != tensorlib.precision)
)
optimizer_changed = False

if custom_optimizer:
if isinstance(custom_optimizer, (str, bytes)):
if isinstance(custom_optimizer, bytes):
custom_optimizer = custom_optimizer.decode("utf-8")
try:
new_optimizer = getattr(
optimize, f"{custom_optimizer.lower()}_optimizer"
)()
except TypeError:
raise InvalidOptimizer(
f"The optimizer provided is not supported: {custom_optimizer}. Select from one of the supported optimizers: scipy, minuit"
)
else:
_name_supported = getattr(optimize, f"{custom_optimizer.name:s}_optimizer")
if _name_supported:
if not isinstance(custom_optimizer, _name_supported):
raise AttributeError(
f"'{custom_optimizer.name}' is not a valid name attribute for optimizer type {type(custom_optimizer)}\n Custom optimizers must have names unique from supported optimizers"
)
new_optimizer = custom_optimizer

else:
new_optimizer = optimize.scipy_optimizer()

optimizer_changed = bool(optimizer != new_optimizer)
# set new backend
tensorlib = backend
optimizer = new_optimizer
# trigger events
if tensorlib_changed:
events.trigger("tensorlib_changed")()
if optimizer_changed:
events.trigger("optimizer_changed")()
# set up any other globals for backend
tensorlib._setup()


from pyhf.pdf import Model
from pyhf.workspace import Workspace
@@ -164,6 +17,7 @@ def set_backend(backend, custom_optimizer=None, precision=None):
"Workspace",
"__version__",
"compat",
"default_backend",
"exceptions",
"get_backend",
"infer",
@@ -185,3 +39,13 @@ def __dir__():

def __dir__():
return __all__


def __getattr__(name):
if name == 'tensorlib':
return get_backend(default=False)[0]
if name == 'optimizer':
return get_backend(default=False)[1]
if name == 'default_backend':
return get_backend(default=True)[0]
raise AttributeError
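
For context, a minimal usage sketch of the behavior this diff introduces (a sketch against this branch, not part of the PR; it assumes the NumPy backend is installed and that the manager hands back the same instance on repeated queries):

import pyhf

# After this PR, pyhf.tensorlib, pyhf.optimizer, and pyhf.default_backend
# are resolved on attribute access via module-level __getattr__ (PEP 562)
# instead of being bound once as globals at import time.
pyhf.set_backend("numpy", precision="32b")

backend, optimizer = pyhf.get_backend()
print(backend.name, backend.precision)  # numpy 32b

# Attribute access re-queries the backend manager, so tensorlib always
# tracks whatever set_backend last installed.
print(pyhf.tensorlib.name)  # numpy

# default_backend is the import-time default and is left untouched by
# set_backend.
print(pyhf.default_backend.name)  # numpy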
7 changes: 6 additions & 1 deletion src/pyhf/constraints.py
@@ -1,4 +1,5 @@
from pyhf import get_backend, default_backend
import pyhf
from pyhf.tensor.manager import get_backend
Comment on lines +1 to +2

Member:
Just so I understand, this is necessary to avoid

ImportError: cannot import name 'default_backend' from partially initialized module 'pyhf' (most likely due to a circular import)

because pyhf.default_backend needs to be evaluated as an attr and so doesn't exist as a static attribute? Or am I just missing the obvious here?

Contributor (author):
Global vs non-global. You don't want to evaluate default_backend at import time.
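
To make that concrete, a minimal sketch of the deferral pattern (a hypothetical stand-alone package, not pyhf's actual manager code):

# lazymod/__init__.py -- illustrates the module-level __getattr__ hook
# from PEP 562 that this PR relies on.
_DEFAULT = None  # backing store; nothing heavy is built at import time


def _build_default_backend():
    # Stand-in for constructing e.g. numpy_backend(); runs only on first
    # attribute access, after the package has finished importing.
    return object()


def __getattr__(name):
    global _DEFAULT
    if name == "default_backend":
        if _DEFAULT is None:
            _DEFAULT = _build_default_backend()
        return _DEFAULT
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

A submodule can then import lazymod at the top of the file and read lazymod.default_backend inside a function or __init__ body, which is exactly the default_backend = pyhf.default_backend pattern in the diffs below: the attribute is only evaluated at call time, after the package has fully imported, so the circular import never triggers.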

from pyhf import events
from pyhf import probability as prob
from pyhf.parameters import ParamViewer
@@ -12,6 +13,8 @@ def __dir__():

class gaussian_constraint_combined:
def __init__(self, pdfconfig, batch_size=None):
default_backend = pyhf.default_backend

self.batch_size = batch_size
# iterate over all constraints order doesn't matter....

@@ -144,6 +147,8 @@ def logpdf(self, auxdata, pars):

class poisson_constraint_combined:
def __init__(self, pdfconfig, batch_size=None):
default_backend = pyhf.default_backend

self.batch_size = batch_size
# iterate over all constraints order doesn't matter....

6 changes: 4 additions & 2 deletions src/pyhf/interpolators/code0.py
@@ -1,6 +1,7 @@
"""Piecewise-linear Interpolation. (Code 0)."""
import logging
from pyhf import get_backend, default_backend
import pyhf
from pyhf.tensor.manager import get_backend
from pyhf import events
from pyhf.interpolators import _slow_interpolator_looper

@@ -25,7 +26,8 @@ class code0:

def __init__(self, histogramssets, subscribe=True):
"""Piecewise-linear Interpolation."""
# nb: this should never be a tensor, store in default backend (e.g. numpy)
default_backend = pyhf.default_backend

self._histogramssets = default_backend.astensor(histogramssets)
# initial shape will be (nsysts, 1)
self.alphasets_shape = (self._histogramssets.shape[0], 1)
6 changes: 4 additions & 2 deletions src/pyhf/interpolators/code1.py
@@ -1,7 +1,8 @@
"""Piecewise-Exponential Interpolation (Code 1)."""
import logging
import math
from pyhf import get_backend, default_backend
import pyhf
from pyhf.tensor.manager import get_backend
from pyhf import events
from pyhf.interpolators import _slow_interpolator_looper

@@ -26,7 +27,8 @@ class code1:

def __init__(self, histogramssets, subscribe=True):
"""Piecewise-Exponential Interpolation."""
# nb: this should never be a tensor, store in default backend (e.g. numpy)
default_backend = pyhf.default_backend

self._histogramssets = default_backend.astensor(histogramssets)
# initial shape will be (nsysts, 1)
self.alphasets_shape = (self._histogramssets.shape[0], 1)
6 changes: 4 additions & 2 deletions src/pyhf/interpolators/code2.py
@@ -1,6 +1,7 @@
"""Quadratic Interpolation (Code 2)."""
import logging
from pyhf import get_backend, default_backend
import pyhf
from pyhf.tensor.manager import get_backend
from pyhf import events
from pyhf.interpolators import _slow_interpolator_looper

@@ -29,7 +30,8 @@ class code2:

def __init__(self, histogramssets, subscribe=True):
"""Quadratic Interpolation."""
# nb: this should never be a tensor, store in default backend (e.g. numpy)
default_backend = pyhf.default_backend

self._histogramssets = default_backend.astensor(histogramssets)
# initial shape will be (nsysts, 1)
self.alphasets_shape = (self._histogramssets.shape[0], 1)
7 changes: 5 additions & 2 deletions src/pyhf/interpolators/code4.py
@@ -1,7 +1,8 @@
"""Polynomial Interpolation (Code 4)."""
import logging
import math
from pyhf import get_backend, default_backend
import pyhf
from pyhf.tensor.manager import get_backend
from pyhf import events
from pyhf.interpolators import _slow_interpolator_looper

@@ -32,11 +33,13 @@ class code4:

def __init__(self, histogramssets, subscribe=True, alpha0=1):
"""Polynomial Interpolation."""
default_backend = pyhf.default_backend

# alpha0 is assumed to be positive and non-zero. If alpha0 == 0, then
# we cannot calculate the coefficients (e.g. determinant == 0)
assert alpha0 > 0
self.__alpha0 = alpha0
# nb: this should never be a tensor, store in default backend (e.g. numpy)

self._histogramssets = default_backend.astensor(histogramssets)
# initial shape will be (nsysts, 1)
self.alphasets_shape = (self._histogramssets.shape[0], 1)
6 changes: 4 additions & 2 deletions src/pyhf/interpolators/code4p.py
@@ -1,6 +1,7 @@
"""Piecewise-Linear + Polynomial Interpolation (Code 4p)."""
import logging
from pyhf import get_backend, default_backend
import pyhf
from pyhf.tensor.manager import get_backend
from pyhf import events
from pyhf.interpolators import _slow_interpolator_looper

@@ -18,7 +19,8 @@ class code4p:

def __init__(self, histogramssets, subscribe=True):
"""Piecewise-Linear + Polynomial Interpolation."""
# nb: this should never be a tensor, store in default backend (e.g. numpy)
default_backend = pyhf.default_backend

self._histogramssets = default_backend.astensor(histogramssets)
# initial shape will be (nsysts, 1)
self.alphasets_shape = (self._histogramssets.shape[0], 1)
6 changes: 5 additions & 1 deletion src/pyhf/modifiers/histosys.py
@@ -1,6 +1,8 @@
import logging

from pyhf import get_backend, default_backend, events
import pyhf
from pyhf import events
from pyhf.tensor.manager import get_backend
from pyhf import interpolators
from pyhf.parameters import ParamViewer

@@ -57,6 +59,8 @@ def append(self, key, channel, sample, thismod, defined_samp):
)

def finalize(self):
default_backend = pyhf.default_backend

for modifier in self.builder_data.values():
for sample in modifier.values():
sample["data"]["mask"] = default_backend.concatenate(
5 changes: 4 additions & 1 deletion src/pyhf/modifiers/shapefactor.py
@@ -1,6 +1,8 @@
import logging

from pyhf import get_backend, default_backend, events
import pyhf
from pyhf import events
from pyhf.tensor.manager import get_backend
from pyhf.parameters import ParamViewer

log = logging.getLogger(__name__)
@@ -124,6 +126,7 @@ def __init__(self, modifiers, pdfconfig, builder_data, batch_size=None):
and at that point can be used to compute the effect of
:class:`~pyhf.modifiers.shapefactor`.
"""
default_backend = pyhf.default_backend

self.batch_size = batch_size
keys = [f'{mtype}/{m}' for m, mtype in modifiers]