Merge pull request #216 from python-adaptive/data_on_grid
2D: add interpolated_on_grid method
basnijholt authored Oct 10, 2019
2 parents 6914a78 + 6794a06 commit 1a64676
Showing 3 changed files with 127 additions and 53 deletions.
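
The headline change is the new public `Learner2D.interpolated_on_grid` method, which exposes the gridded interpolation that `plot` previously computed inline. A minimal usage sketch, not part of the diff; the quadratic test function and the 200-point goal are only illustrative:

import adaptive

def f(xy):
    x, y = xy
    return x**2 + y**2

learner = adaptive.Learner2D(f, bounds=[(-1, 1), (-1, 1)])
adaptive.runner.simple(learner, goal=lambda l: l.npoints >= 200)

# New in this commit: the interpolated data on a regular grid.
xs, ys, zs = learner.interpolated_on_grid()       # grid size chosen from the smallest triangle
xs, ys, zs = learner.interpolated_on_grid(n=100)  # or an explicit 100 x 100 grid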
174 changes: 123 additions & 51 deletions adaptive/learner/learner2D.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-

import itertools
import warnings
from collections import OrderedDict
from copy import copy
from math import sqrt
@@ -26,7 +27,7 @@ def deviations(ip):
Returns
-------
numpy array
deviations : list
The deviation per triangle.
"""
values = ip.values / (ip.values.ptp(axis=0).max() or 1)
@@ -64,7 +65,7 @@ def areas(ip):
Returns
-------
numpy array
areas : numpy.ndarray
The area per triangle in ``ip.tri``.
"""
p = ip.tri.points[ip.tri.vertices]
@@ -78,16 +79,27 @@ def uniform_loss(ip):
Works with `~adaptive.Learner2D` only.
Parameters
----------
ip : `scipy.interpolate.LinearNDInterpolator` instance
Returns
-------
losses : numpy.ndarray
Loss per triangle in ``ip.tri``.
Examples
--------
>>> from adaptive.learner.learner2D import uniform_loss
>>> def f(xy):
... x, y = xy
... return x**2 + y**2
>>>
>>> learner = adaptive.Learner2D(f,
... bounds=[(-1, 1), (-1, 1)],
... loss_per_triangle=uniform_loss)
>>> learner = adaptive.Learner2D(
... f,
... bounds=[(-1, 1), (-1, 1)],
... loss_per_triangle=uniform_loss,
... )
>>>
"""
return np.sqrt(areas(ip))
@@ -102,17 +114,18 @@ def resolution_loss_function(min_distance=0, max_distance=1):
The arguments `min_distance` and `max_distance` should be between 0 and 1
because the total area is normalized to 1.
Returns
-------
loss_function : callable
Examples
--------
>>> def f(xy):
... x, y = xy
... return x**2 + y**2
>>>
>>> loss = resolution_loss_function(min_distance=0.01, max_distance=1)
>>> learner = adaptive.Learner2D(f,
... bounds=[(-1, 1), (-1, 1)],
... loss_per_triangle=loss)
>>>
>>> learner = adaptive.Learner2D(f, bounds=[(-1, 1), (-1, 1)], loss_per_triangle=loss)
"""

def resolution_loss(ip):
@@ -132,12 +145,21 @@ def resolution_loss(ip):


def minimize_triangle_surface_loss(ip):
"""Loss function that is similar to the default loss function in the
"""Loss function that is similar to the distance loss function in the
`~adaptive.Learner1D`. The loss is the area spanned by the 3D
vectors of the vertices.
Works with `~adaptive.Learner2D` only.
Parameters
----------
ip : `scipy.interpolate.LinearNDInterpolator` instance
Returns
-------
losses : numpy.ndarray
Loss per triangle in ``ip.tri``.
Examples
--------
>>> from adaptive.learner.learner2D import minimize_triangle_surface_loss
@@ -169,6 +191,19 @@ def _get_vectors(points):


def default_loss(ip):
"""Loss function that combines `deviations` and `areas` of the triangles.
Works with `~adaptive.Learner2D` only.
Parameters
----------
ip : `scipy.interpolate.LinearNDInterpolator` instance
Returns
-------
losses : numpy.ndarray
Loss per triangle in ``ip.tri``.
"""
dev = np.sum(deviations(ip), axis=0)
A = areas(ip)
losses = dev * np.sqrt(A) + 0.3 * A
@@ -186,15 +221,15 @@ def choose_point_in_triangle(triangle, max_badness):
Parameters
----------
triangle : numpy array
The coordinates of a triangle with shape (3, 2)
triangle : numpy.ndarray
The coordinates of a triangle with shape (3, 2).
max_badness : int
The badness at which the point is chosen either on an edge or
in the middle.
Returns
-------
point : numpy array
point : numpy.ndarray
The x and y coordinate of the suggested new point.
"""
a, b, c = triangle
@@ -230,7 +265,6 @@ class Learner2D(BaseLearner):
triangle area, to determine the loss. See the notes
for more details.
Attributes
----------
data : dict
@@ -248,12 +282,6 @@ class Learner2D(BaseLearner):
triangles will be stretched along ``x``, otherwise
along ``y``.
Methods
-------
data_combined : dict
Sampled points and values so far including
the unknown interpolated points in `pending_points`.
Notes
-----
Adapted from an initial implementation by Pauli Virtanen.
@@ -345,6 +373,38 @@ def bounds_are_done(self):
(p in self.pending_points or p in self._stack) for p in self._bounds_points
)

def interpolated_on_grid(self, n=None):
"""Get the interpolated data on a grid.
Parameters
----------
n : int, optional
Number of points in x and y. If None (default) this number is
evaluated by looking at the size of the smallest triangle.
Returns
-------
xs : 1D numpy.ndarray
ys : 1D numpy.ndarray
interpolated_on_grid : 2D numpy.ndarray
"""
ip = self.interpolator(scaled=True)
if n is None:
# Calculate how many grid points are needed.
# factor from A=√3/4 * a² (equilateral triangle)
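# (for an equilateral triangle a = 2*sqrt(A)/3**0.25, so n ≈ 1/a ≈ 0.658/sqrt(A))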
n = int(0.658 / sqrt(areas(ip).min()))
n = max(n, 10)

# The bounds of the linspace should be (-0.5, 0.5), but because of
# numerical precision problems it could (for example) be
# (-0.5000000000000001, 0.49999999999999983); then any point exactly on
# the boundary would fall outside of the domain. See #181.
eps = 1e-13
xs = ys = np.linspace(-0.5 + eps, 0.5 - eps, n)
zs = ip(xs[:, None], ys[None, :] * self.aspect_ratio).squeeze()
xs, ys = self._unscale(np.vstack([xs, ys]).T).T
return xs, ys, zs

def _data_in_bounds(self):
if self.data:
points = np.array(list(self.data.keys()))
@@ -358,7 +418,8 @@ def _data_interp(self):
if self.pending_points:
points = list(self.pending_points)
if self.bounds_are_done:
values = self.ip()(self._scale(points))
ip = self.interpolator(scaled=True)
values = ip(self._scale(points))
else:
# Without the bounds the interpolation cannot be done properly,
# so we just set everything to zero.
@@ -375,23 +436,47 @@ def _data_combined(self):
values_combined = np.vstack([values, values_interp])
return points_combined, values_combined

def data_combined(self):
"""Like `data`, however this includes the points in
`pending_points` for which the values are interpolated."""
# Interpolate the unfinished points
points, values = self._data_combined()
return {tuple(k): v for k, v in zip(points, values)}

def ip(self):
"""Deprecated, use `self.interpolator(scaled=True)`"""
warnings.warn(
"`learner.ip()` is deprecated, use `learner.interpolator(scaled=True)`."
" This will be removed in v1.0.",
DeprecationWarning,
)
return self.interpolator(scaled=True)

def interpolator(self, *, scaled=False):
"""A `scipy.interpolate.LinearNDInterpolator` instance
containing the learner's data."""
if self._ip is None:
containing the learner's data.
Parameters
----------
scaled : bool
Use True if all points are inside the
unit-square [(-0.5, 0.5), (-0.5, 0.5)] or False if
the data points are inside the ``learner.bounds``.
Returns
-------
interpolator : `scipy.interpolate.LinearNDInterpolator`
Examples
--------
>>> xs, ys = [np.linspace(*b, num=100) for b in learner.bounds]
>>> ip = learner.interpolator()
>>> zs = ip(xs[:, None], ys[None, :])
"""
if scaled:
if self._ip is None:
points, values = self._data_in_bounds()
points = self._scale(points)
self._ip = interpolate.LinearNDInterpolator(points, values)
return self._ip
else:
points, values = self._data_in_bounds()
points = self._scale(points)
self._ip = interpolate.LinearNDInterpolator(points, values)
return self._ip
return interpolate.LinearNDInterpolator(points, values)

def ip_combined(self):
def _interpolator_combined(self):
"""A `scipy.interpolate.LinearNDInterpolator` instance
containing the learner's data *and* interpolated data of
the `pending_points`."""
@@ -428,7 +513,7 @@ def _fill_stack(self, stack_till=1):
raise ValueError("too few points...")

# Interpolate
ip = self.ip_combined()
ip = self._interpolator_combined()

losses = self.loss_per_triangle(ip)

@@ -496,7 +581,7 @@ def ask(self, n, tell_pending=True):
def loss(self, real=True):
if not self.bounds_are_done:
return np.inf
ip = self.ip() if real else self.ip_combined()
ip = self.interpolator(scaled=True) if real else self._interpolator_combined()
losses = self.loss_per_triangle(ip)
return losses.max()

@@ -541,21 +626,8 @@ def plot(self, n=None, tri_alpha=0):
lbrt = x[0], y[0], x[1], y[1]

if len(self.data) >= 4:
ip = self.ip()

if n is None:
# Calculate how many grid points are needed.
# factor from A=√3/4 * a² (equilateral triangle)
n = int(0.658 / sqrt(areas(ip).min()))
n = max(n, 10)

# The bounds of the linspace should be (-0.5, 0.5) but because of
# numerical precision problems it could (for example) be
# (-0.5000000000000001, 0.49999999999999983), then any point at exact
# boundary would be outside of the domain. See #181.
eps = 1e-13
x = y = np.linspace(-0.5 + eps, 0.5 - eps, n)
z = ip(x[:, None], y[None, :] * self.aspect_ratio).squeeze()
ip = self.interpolator(scaled=True)
x, y, z = self.interpolated_on_grid(n)

if self.vdim > 1:
ims = {
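The two remaining files only swap the now-deprecated `learner.ip()` for the new accessor. A minimal migration sketch for downstream code, assuming `learner` is an existing `Learner2D`:

# Before this change (still works, but now emits a DeprecationWarning):
ip = learner.ip()

# After this change:
ip = learner.interpolator(scaled=True)
losses = learner.loss_per_triangle(ip)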
4 changes: 3 additions & 1 deletion adaptive/tests/test_learners.py
@@ -400,7 +400,9 @@ def test_expected_loss_improvement_is_less_than_total_loss(
_, loss_improvements = learner.ask(M)

if learner_type is Learner2D:
assert sum(loss_improvements) < sum(learner.loss_per_triangle(learner.ip()))
assert sum(loss_improvements) < sum(
learner.loss_per_triangle(learner.interpolator(scaled=True))
)
elif learner_type is Learner1D:
assert sum(loss_improvements) < sum(learner.losses.values())
elif learner_type is AverageLearner:
2 changes: 1 addition & 1 deletion docs/logo.py
@@ -28,7 +28,7 @@ def ring(xy):

def plot_learner_and_save(learner, fname):
fig, ax = plt.subplots()
tri = learner.ip().tri
tri = learner.interpolator(scaled=True).tri
triang = mtri.Triangulation(*tri.points.T, triangles=tri.vertices)
ax.triplot(triang, c="k", lw=0.8)
ax.imshow(learner.plot().Image.I.data, extent=(-0.5, 0.5, -0.5, 0.5))
