Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove numdifftools #3485

Merged
merged 2 commits into from
May 21, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion RELEASE-NOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,8 @@

- `nuts_kwargs` and `step_kwargs` have been deprecated in favor of using the standard `kwargs` to pass optional step method arguments.
- `SGFS` and `CSG` have been removed (Fix for [#3353](https://github.com/pymc-devs/pymc3/issues/3353)). They have been moved to [pymc3-experimental](https://github.com/pymc-devs/pymc3-experimental).
- References to `live_plot` and corresponding notebooks have been removed.
- References to `live_plot` and corresponding notebooks have been removed.
- Function `approx_hessian` was removed because `numdifftools` became incompatible with the current `scipy`. The function was already optional, only available to a user who installed `numdifftools` separately, and not hit on any common code paths. [#3485](https://github.com/pymc-devs/pymc3/pull/3485).

## PyMC3 3.6 (Dec 21 2018)

Expand Down
43 changes: 5 additions & 38 deletions pymc3/tests/test_distributions.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@

from .helpers import SeededTest, select_by_precision
from ..vartypes import continuous_types
from ..model import Model, Point, Potential, Deterministic
from ..blocking import DictToVarBijection, DictToArrayBijection, ArrayOrdering
from ..model import Model, Point, Deterministic
from ..blocking import DictToVarBijection
from ..distributions import (
DensityDist, Categorical, Multinomial, VonMises, Dirichlet,
MvStudentT, MvNormal, MatrixNormal, ZeroInflatedPoisson,
Expand Down Expand Up @@ -471,37 +471,9 @@ def check_int_to_1(self, model, value, domain, paramdomains):
area = integrate_nd(pdfx, domain, value.dshape, value.dtype)
assert_almost_equal(area, 1, err_msg=str(pt))

def check_dlogp(self, model, value, domain, paramdomains):
    """Check the model's analytic gradient of logp against a numerical one.

    Samples points from the cartesian product of `domain` (for the value)
    and `paramdomains` (for the parameters), and asserts that the autodiff
    gradient agrees with a numdifftools `Gradient` estimate at each point.

    Silently skipped when numdifftools is not installed or the model has
    no continuous variables.
    """
    try:
        from numdifftools import Gradient
    except ImportError:
        return
    if not model.cont_vars:
        return

    domains = paramdomains.copy()
    domains['value'] = domain
    bij = DictToArrayBijection(
        ArrayOrdering(model.cont_vars), model.test_point)
    dlogp = bij.mapf(model.fastdlogp(model.cont_vars))
    logp = bij.mapf(model.fastlogp)

    def wrapped_logp(x):
        # Outside the support, logp may raise; report NaN so the numerical
        # differentiator can cope instead of aborting the whole check.
        try:
            return logp(x)
        # Was a bare `except:` — never mask SystemExit/KeyboardInterrupt.
        except Exception:
            return np.nan

    ndlogp = Gradient(wrapped_logp)
    # Loop-invariant: precision threshold does not depend on the point.
    decimals = select_by_precision(float64=6, float32=4)
    for pt in product(domains, n_samples=100):
        pt = Point(pt, model=model)
        pt = bij.map(pt)
        assert_almost_equal(dlogp(pt), ndlogp(pt), decimal=decimals, err_msg=str(pt))

def checkd(self, distfam, valuedomain, vardomains, checks=None, extra_args=None):
if checks is None:
checks = (self.check_int_to_1, self.check_dlogp)
checks = (self.check_int_to_1, )

if extra_args is None:
extra_args = {}
Expand Down Expand Up @@ -940,7 +912,8 @@ def test_wishart(self, n):
# This check compares the autodiff gradient to the numdiff gradient.
# However, due to the strict constraints of the wishart,
# it is impossible to numerically determine the gradient as a small
# perturbation breaks the symmetry. Thus disabling. Also, numdifftools was
# pertubation breaks the symmetry. Thus disabling. Also, numdifftools was
# removed in June 2019, so an alternative would be needed.
#
# self.checkd(Wishart, PdMatrix(n), {'n': Domain([2, 3, 4, 2000]), 'V': PdMatrix(n)},
# checks=[self.check_dlogp])
Expand Down Expand Up @@ -1120,12 +1093,6 @@ def logp(x):
return -log(2 * .5) - abs(x - .5) / .5
self.checkd(DensityDist, R, {}, extra_args={'logp': logp})

def test_addpotential(self):
    # Regression check: an arbitrary Potential term added to a model must
    # be reflected in the model's logp gradient. Verified by comparing the
    # autodiff gradient against a numerical one via check_dlogp.
    with Model() as model:
        value = Normal('value', 1, 1)
        # Potential registers -value**2 as an extra logp term on `model`;
        # the return value is intentionally unused.
        Potential('value_squared', -value ** 2)
        # NOTE(review): scrape lost indentation — assuming this call sits
        # inside the model context, matching the file's other tests; the
        # explicit `model` argument makes either placement equivalent.
        self.check_dlogp(model, value, R, {})

def test_get_tau_sigma(self):
sigma = np.array([2])
assert_almost_equal(continuous.get_tau_sigma(sigma=sigma), [1. / sigma**2, sigma])
Expand Down
2 changes: 1 addition & 1 deletion pymc3/tuning/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
from .starting import find_MAP
from .scaling import approx_hessian, find_hessian, trace_cov, guess_scaling
from .scaling import find_hessian, trace_cov, guess_scaling
36 changes: 1 addition & 35 deletions pymc3/tuning/scaling.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,41 +4,7 @@
from ..theanof import hessian_diag, inputvars
from ..blocking import DictToArrayBijection, ArrayOrdering

__all__ = ['approx_hessian', 'find_hessian', 'trace_cov', 'guess_scaling']


def approx_hessian(point, vars=None, model=None):
    """
    Returns an approximation of the Hessian at the current chain location.

    Parameters
    ----------
    model : Model (optional if in `with` context)
    point : dict
    vars : list
        Variables for which Hessian is to be calculated.

    Returns
    -------
    ndarray
        Negative numerical Jacobian of the gradient of logp at `point`,
        i.e. an approximation of the observed information matrix; invert
        it to obtain an approximate covariance matrix.
    """
    # Deferred import: numdifftools is an optional dependency, only
    # required when this function is actually called.
    from numdifftools import Jacobian

    model = modelcontext(model)
    if vars is None:
        vars = model.cont_vars
    vars = inputvars(vars)

    point = Point(point, model=model)

    bij = DictToArrayBijection(ArrayOrdering(vars), point)
    dlogp = bij.mapf(model.fastdlogp(vars))

    # Renamed arg to avoid shadowing the outer `point` parameter.
    def grad_logp(x):
        # NaNs/infs from out-of-support evaluations would poison the
        # numerical Jacobian; clamp them to finite values.
        return np.nan_to_num(dlogp(x))

    # The Jacobian of the gradient at `point` is the Hessian of logp.
    # (Previously explained in a stray triple-quoted string, which was a
    # no-op expression statement, not a comment.)
    return -Jacobian(grad_logp)(bij.map(point))
__all__ = ['find_hessian', 'trace_cov', 'guess_scaling']


def fixed_hessian(point, vars=None, model=None):
Expand Down
1 change: 0 additions & 1 deletion requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ Keras>=2.0.8
nbsphinx>=0.2.13
nose>=1.3.7
nose-parameterized==0.6.0
numdifftools>=0.9.20
numpy>=1.13.0
numpydoc==0.7.0
pycodestyle>=2.3.1
Expand Down