Skip to content
Snippets Groups Projects

Resolve "(Learner1D) add possibility to use the direct neighbors in the loss"

Merged Jorn Hoofwijk requested to merge 119-add-second-order-loss-to-adaptive into master
Compare and Show latest version
3 files
+ 25
− 17
Compare changes
  • Side-by-side
  • Inline
Files
3
@@ -3,12 +3,14 @@ from copy import deepcopy
import heapq
import itertools
import math
from collections.abc import Iterable  # `collections.Iterable` was removed in Python 3.10

import numpy as np
import sortedcontainers

from .base_learner import BaseLearner
from .learnerND import volume
from .triangulation import simplex_volume_in_embedding
from ..notebook_integration import ensure_holoviews
from ..utils import cache_latest
@@ -58,37 +60,40 @@ def default_loss(interval, scale, function_values):
def _loss_of_multi_interval(xs, ys):
    """Return the average "curvature" volume over consecutive point triples.

    Parameters
    ----------
    xs : list of float
        Scaled x-coordinates; needs at least 3 points.
    ys : list
        Function values at ``xs``; either scalars or iterables
        (vector-valued functions).

    Returns
    -------
    float
        The mean volume of the simplices spanned by each triple of
        consecutive points — a measure of local curvature.
    """
    N = len(xs) - 2  # number of consecutive triples
    if isinstance(ys[0], Iterable):
        # Vector-valued function: embed each point as (x, *y) and measure
        # the triangle area in the higher-dimensional embedding.
        pts = [(x, *y) for x, y in zip(xs, ys)]
        return sum(simplex_volume_in_embedding(pts[i:i + 3])
                   for i in range(N)) / N
    else:
        # Scalar-valued function: plain 2-D triangle area.
        pts = [(x, y) for x, y in zip(xs, ys)]
        return sum(volume(pts[i:i + 3]) for i in range(N)) / N
def triangle_loss(interval, neighbours, scale, function_values):
    """Loss of an interval based on the triangle it forms with its neighbours.

    Parameters
    ----------
    interval : tuple of float
        ``(x_left, x_right)`` bounds of the interval.
    neighbours : tuple
        ``(neighbour_left, neighbour_right)``; either may be ``None`` when
        the interval touches the domain boundary.
    scale : tuple of float
        ``(x_scale, y_scale)`` used to normalize coordinates.
    function_values : dict
        Maps x-coordinate to the (scalar or vector) function value.

    Returns
    -------
    float
        Average triangle volume of the involved points, or the scaled
        interval width when fewer than 3 points are available.
    """
    x_left, x_right = interval
    neighbour_left, neighbour_right = neighbours

    xs = [neighbour_left, x_left, x_right, neighbour_right]
    # The neighbours could be None if we are at the boundary, in that case
    # we have to filter this out.
    xs = [x for x in xs if x is not None]

    # y_scale is 0 before any spread in y is seen; fall back to 1 to avoid
    # division by zero.
    y_scale = scale[1] or 1
    ys = [function_values[x] / y_scale for x in xs]

    if len(xs) <= 2:
        # Not enough points to form a triangle; use the scaled width.
        return (x_right - x_left) / scale[0]
    else:
        xs_scaled = [x / scale[0] for x in xs]
        return _loss_of_multi_interval(xs_scaled, ys)
def get_curvature_loss(area_factor=1, euclid_factor=0.02, horizontal_factor=0.02):
    """Return a curvature-aware loss function for `Learner1D`.

    The returned loss is a weighted combination of:
    - the square root of the triangle (curvature) loss,
    - the default Euclidean loss,
    - the scaled interval width.

    Parameters
    ----------
    area_factor : float
        Weight of the triangle-loss term.
    euclid_factor : float
        Weight of the default (Euclidean) loss term.
    horizontal_factor : float
        Weight of the scaled interval width term.

    Returns
    -------
    callable
        ``curvature_loss(interval, neighbours, scale, function_values)``.
    """
    def curvature_loss(interval, neighbours, scale, function_values):
        triangle_loss_ = triangle_loss(interval, neighbours, scale, function_values)
        default_loss_ = default_loss(interval, scale, function_values)
        dx = (interval[1] - interval[0]) / scale[0]  # x-scaled width
        return (area_factor * (triangle_loss_**0.5)
                + euclid_factor * default_loss_
                + horizontal_factor * dx)
    return curvature_loss
@@ -158,7 +163,7 @@ class Learner1D(BaseLearner):
self._loss_depends_on_neighbours = loss_depends_on_neighbours
if loss_depends_on_neighbours:
self.loss_per_interval = loss_per_interval or curvature_loss
self.loss_per_interval = loss_per_interval or get_curvature_loss()
else:
self.loss_per_interval = loss_per_interval or default_loss
@@ -307,7 +312,7 @@ class Learner1D(BaseLearner):
@staticmethod
def _find_neighbors(x, neighbors):
if x in neighbors:
return neighbors
return neighbors[x]
if x is None:
return None, None
pos = neighbors.bisect_left(x)
Loading