Resolve "(Learner1D) add possibility to use the direct neighbors in the loss"

Merged Jorn Hoofwijk requested to merge 119-add-second-order-loss-to-adaptive into master
1 file  +57  −6
@@ -15,14 +15,65 @@ from ..notebook_integration import ensure_holoviews
 from ..utils import cache_latest
-def annotate_with_nn_neighbors(nn_neighbors):
+def use_nn_neighbors(n):
"""Decorator to specify how many neighboring intervals the loss function uses.
This decorator can wrap around a loss function to let `~adaptive.Learner1D`
know that you would like to look at the N-nearest neighboring intervals.
The loss function is then guaranteed to receive the data of at least the
nn-neighbors and a dict that tells you what the neighboring points of these
are. And the Learner1D will then make sure that the loss is updated whenever
on of the nn-neighbours changes.
+
+    Examples
+    --------
+    This is a part of the curvature loss function:
+
+    >>> @use_nn_neighbors(1)
+    ... def triangle_loss(interval, scale, data, neighbors):
+    ...     x_left, x_right = interval
+    ...     xs = [neighbors[x_left][0], x_left, x_right, neighbors[x_right][1]]
+    ...     # at the boundary, neighbors[<left boundary x>] is (None, <some other x>)
+    ...     xs = [x for x in xs if x is not None]
+    ...     if len(xs) <= 2:
+    ...         return (x_right - x_left) / scale[0]
+    ...
+    ...     y_scale = scale[1] or 1
+    ...     ys_scaled = [data[x] / y_scale for x in xs]
+    ...     xs_scaled = [x / scale[0] for x in xs]
+    ...     N = len(xs) - 2  # number of triangles we can construct
+    ...     pts = [(x, y) for x, y in zip(xs_scaled, ys_scaled)]
+    ...     # `volume` computes the area of the triangle spanned by three points
+    ...     return sum(volume(pts[i:i+3]) for i in range(N)) / N
+
+    Or you may define a loss that favours the (local) minima of a function:
+
+    >>> @use_nn_neighbors(1)
+    ... def loss(interval, scale, data, neighbors):
+    ...     x_left, x_right = interval
+    ...     n_left = neighbors[x_left][0]
+    ...     n_right = neighbors[x_right][1]
+    ...     is_min = True
+    ...
+    ...     if n_left is not None and data[x_left] > data[n_left]:
+    ...         is_min = False
+    ...     if n_right is not None and data[x_right] > data[n_right]:
+    ...         is_min = False
+    ...
+    ...     loss = (x_right - x_left) / scale[0]
+    ...
+    ...     if is_min:
+    ...         return loss * 100
+    ...     return loss
+    """
     def _wrapped(loss_per_interval):
-        loss_per_interval.nn_neighbors = nn_neighbors
+        loss_per_interval.nn_neighbors = n
         return loss_per_interval
     return _wrapped
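
For context (not part of the diff): a minimal sketch of how a decorated loss might be used, assuming the `loss` function from the docstring example above and some placeholder 1-D function `f` to sample. Learner1D reads back the nn_neighbors attribute that the decorator attaches.

    from adaptive import Learner1D

    # The decorator only attaches metadata to the function.
    assert loss.nn_neighbors == 1

    # `f` stands in for whatever 1-D function you want to learn.
    learner = Learner1D(f, bounds=(-1, 1), loss_per_interval=loss)
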
-@annotate_with_nn_neighbors(0)
+@use_nn_neighbors(0)
 def uniform_loss(interval, scale, data, neighbors):
     """Loss function that samples the domain uniformly.
@@ -44,7 +95,7 @@ def uniform_loss(interval, scale, data, neighbors):
     return dx
-@annotate_with_nn_neighbors(0)
+@use_nn_neighbors(0)
 def default_loss(interval, scale, data, neighbors):
     """Calculate loss on a single interval.
@@ -79,7 +130,7 @@ def _loss_of_multi_interval(xs, ys):
     return sum(vol(pts[i:i+3]) for i in range(N)) / N
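
As an aside (not part of the diff): `vol` here is the triangle-area helper the module brings in elsewhere; the import sits outside this hunk. A minimal 2-D stand-in, assuming it returns the area spanned by three (x, y) points, could be:

    def volume(pts):
        # Area of the triangle spanned by three points (shoelace formula).
        (x0, y0), (x1, y1), (x2, y2) = pts
        return abs((x1 - x0) * (y2 - y0) - (x2 - x0) * (y1 - y0)) / 2
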
-@annotate_with_nn_neighbors(1)
+@use_nn_neighbors(1)
 def triangle_loss(interval, scale, data, neighbors):
     x_left, x_right = interval
     xs = [neighbors[x_left][0], x_left, x_right, neighbors[x_right][1]]
@@ -95,7 +146,7 @@ def triangle_loss(interval, scale, data, neighbors):
 def get_curvature_loss(area_factor=1, euclid_factor=0.02, horizontal_factor=0.02):
-    @annotate_with_nn_neighbors(1)
+    @use_nn_neighbors(1)
     def curvature_loss(interval, scale, data, neighbors):
         triangle_loss_ = triangle_loss(interval, scale, data, neighbors)
         default_loss_ = default_loss(interval, scale, data, neighbors)
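
The remainder of curvature_loss is cut off in this view. Purely as an illustration of how such factors could weight the two losses (an assumption based on the parameter names, not the MR's actual code), the body might continue along these lines:

        # Hypothetical continuation of curvature_loss:
        dx = (interval[1] - interval[0]) / scale[0]  # normalized interval width
        return (area_factor * triangle_loss_
                + euclid_factor * default_loss_
                + horizontal_factor * dx)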