Resolve "(Learner1D) add possibility to use the direct neighbors in the loss"

Merged Jorn Hoofwijk requested to merge 119-add-second-order-loss-to-adaptive into master
1 unresolved thread
1 file  +28 −24
@@ -12,6 +12,7 @@ from .learnerND import volume
 from ..notebook_integration import ensure_holoviews
 from ..utils import cache_latest
 
 
 def uniform_loss(interval, scale, function_values):
     """Loss function that samples the domain uniformly.
@@ -56,36 +57,39 @@ def default_loss(interval, scale, function_values):
     return loss
 
 
-def loss_of_multi_interval(xs, ys):
+def _loss_of_multi_interval(xs, ys):
     pts = list(zip(xs, ys))
-    vols = [volume(pts[i:i+3]) for i in range(len(pts)-2)]
-    return np.average(vols)
+    N = len(pts) - 2
+    return sum(volume(pts[i:i+3]) for i in range(N)) / N
 
 
-def curvature_loss(interval, neighbours, scale, function_values, area_factor=1, euclid_factor=0.02, horizontal_factor=0.02):
+def triangle_loss(interval, neighbours, scale, function_values):
     x_left, x_right = interval
     neighbour_left, neighbour_right = neighbours
-    x_scale, y_scale = scale
-    dx = (x_right - x_left) / x_scale
-    old_loss = default_loss(interval, scale, function_values)
     xs = [neighbour_left, x_left, x_right, neighbour_right]
     # The neighbours could be None if we are at the boundary, in that case we
     # have to filter this out
     xs = [x for x in xs if x is not None]
-    if y_scale == 0:
-        y_scale = 1
-    ys = [function_values[x]/y_scale for x in xs]
-    xs = [x/x_scale for x in xs]
+    y_scale = scale[1] or 1
+    ys = [function_values[x] / y_scale for x in xs]
+    xs = [x / scale[0] for x in xs]
 
     if len(xs) <= 2:
-        return dx
+        return (x_right - x_left) / scale[0]
     else:
-        l = loss_of_multi_interval(xs, ys)
-        return area_factor * (l**0.5) + euclid_factor * old_loss + horizontal_factor * dx
+        return _loss_of_multi_interval(xs, ys)
+
+
+def get_curvature_loss(area_factor=1, euclid_factor=0.02, horizontal_factor=0.02):
+    def curvature_loss(interval, neighbours, scale, function_values):
+        triangle_loss_ = triangle_loss(interval, neighbours, scale, function_values)
+        default_loss_ = default_loss(interval, scale, function_values)
+        dx = (interval[1] - interval[0]) / scale[0]
+        return (area_factor * (triangle_loss_**0.5)
+                + euclid_factor * default_loss_
+                + horizontal_factor * dx)
+    return curvature_loss
 
 
 def linspace(x_left, x_right, n):
     """This is equivalent to
@@ -215,7 +219,7 @@ class Learner1D(BaseLearner):
     def _get_loss_in_interval(self, x_left, x_right):
         if x_left is None or x_right is None:
             return None
 
         dx = x_right - x_left
         if dx < self._dx_eps:
             return 0
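
For context (not part of the diff): the _dx_eps guard stops refinement of intervals that are already at floating-point resolution by reporting zero loss for them. A toy illustration of the idea; dx_eps here is a made-up stand-in for the learner's self._dx_eps:

# Toy illustration only; `dx_eps` is a stand-in for self._dx_eps.
dx_eps = 1e-12

def guarded_loss(x_left, x_right, raw_loss):
    if x_right - x_left < dx_eps:
        return 0  # interval too narrow to subdivide meaningfully
    return raw_loss

print(guarded_loss(0.0, 1e-13, raw_loss=0.5))  # -> 0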
@@ -230,12 +234,12 @@ class Learner1D(BaseLearner):
                                           self._scale, self.data)
         else:
             return self.loss_per_interval(interval, self._scale, self.data)
 
     def _update_interpolated_loss_in_interval(self, x_left, x_right):
         if x_left is None or x_right is None:
             return None
 
         dx = x_right - x_left
         loss = self._get_loss_in_interval(x_left, x_right)
         self.losses[x_left, x_right] = loss
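
The hunk above starts mid-call, so the branch that decides which call signature to use is not visible; only the two call forms are. A sketch of one way such a dispatch can be written, using an arity check (an illustrative assumption, not necessarily the mechanism this MR uses):

# Illustrative sketch only: route neighbour-aware losses to the
# (interval, neighbours, scale, data) signature and classic losses to
# (interval, scale, data), based on the callable's arity.
import inspect

def call_loss(loss_per_interval, interval, neighbours, scale, data):
    n_params = len(inspect.signature(loss_per_interval).parameters)
    if n_params == 4:
        return loss_per_interval(interval, neighbours, scale, data)
    return loss_per_interval(interval, scale, data)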
@@ -301,7 +305,7 @@ class Learner1D(BaseLearner):
     @staticmethod
     def _find_neighbors(x, neighbors):
         if x in neighbors:
             return neighbors[x]
+        if x is None:
+            return None, None
         pos = neighbors.bisect_left(x)
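
For reference (not part of the diff): neighbors is a SortedDict mapping each sampled x to its [x_left, x_right] pair, so for an unsampled x the method falls through to bisect_left. A standalone sketch of that lookup:

# Standalone sketch of the bisect-based neighbour lookup.
from sortedcontainers import SortedDict

neighbors = SortedDict({0.0: [None, 0.5], 0.5: [0.0, 1.0], 1.0: [0.5, None]})
x = 0.75  # not yet sampled, so the `x in neighbors` fast path misses
pos = neighbors.bisect_left(x)  # insertion index among the sorted keys
keys = neighbors.keys()
x_left = keys[pos - 1] if pos != 0 else None
x_right = keys[pos] if pos != len(neighbors) else None
print(x_left, x_right)  # -> 0.5 1.0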