Skip to content
Snippets Groups Projects

Resolve "(Learner1D) add possibility to use the direct neighbors in the loss"

Merged Jorn Hoofwijk requested to merge 119-add-second-order-loss-to-adaptive into master
Compare and Show latest version
3 files
+ 16
− 23
Compare changes
  • Side-by-side
  • Inline
Files
3
@@ -63,11 +63,11 @@ def _loss_of_multi_interval(xs, ys):
N = len(xs) - 2
if isinstance(ys[0], Iterable):
pts = [(x, *y) for x, y in zip(xs, ys)]
# print(pts)
return sum(simplex_volume_in_embedding(pts[i:i+3]) for i in range(N)) / N
vol = simplex_volume_in_embedding
else:
pts = [(x, y) for x, y in zip(xs, ys)]
return sum(volume(pts[i:i+3]) for i in range(N)) / N
vol = volume
return sum(vol(pts[i:i+3]) for i in range(N)) / N
def triangle_loss(interval, neighbours, scale, function_values):
@@ -77,14 +77,14 @@ def triangle_loss(interval, neighbours, scale, function_values):
# The neighbours could be None if we are at the boundary, in that case we
# have to filter this out
xs = [x for x in xs if x is not None]
y_scale = scale[1] or 1
ys = [function_values[x] / y_scale for x in xs]
if len(xs) <= 2:
return (x_right - x_left) / scale[0]
else:
y_scale = scale[1] or 1
ys_scaled = [function_values[x] / y_scale for x in xs]
xs_scaled = [x / scale[0] for x in xs]
return _loss_of_multi_interval(xs_scaled, ys)
return _loss_of_multi_interval(xs_scaled, ys_scaled)
def get_curvature_loss(area_factor=1, euclid_factor=0.02, horizontal_factor=0.02):
@@ -227,16 +227,14 @@ class Learner1D(BaseLearner):
if x_left is None or x_right is None:
return None
dx = x_right - x_left
if dx < self._dx_eps:
if x_right - x_left < self._dx_eps:
return 0
# we need to compute the loss for this interval
interval = (x_left, x_right)
if self._loss_depends_on_neighbours:
neighbour_left = self._find_neighbors(x_left , self.neighbors)[0]
neighbour_right = self._find_neighbors(x_right, self.neighbors)[1]
neighbours = neighbour_left, neighbour_right
neighbours = [self.neighbors.get(x, (None, None))[i]
for i, x in enumerate(interval)]
return self.loss_per_interval(interval, neighbours,
self._scale, self.data)
else:
@@ -247,13 +245,13 @@ class Learner1D(BaseLearner):
if x_left is None or x_right is None:
return None
dx = x_right - x_left
loss = self._get_loss_in_interval(x_left, x_right)
self.losses[x_left, x_right] = loss
# Iterate over all interpolated intervals in between
# x_left and x_right and set the newly interpolated loss.
a, b = x_left, None
dx = x_right - x_left
while b != x_right:
b = self.neighbors_combined[a][1]
self.losses_combined[a, b] = (b - a) * loss / dx
@@ -280,8 +278,8 @@ class Learner1D(BaseLearner):
# if the loss depends on the neighbors we should also update those losses
if self._loss_depends_on_neighbours:
neighbour_left = self._find_neighbors(x_left , self.neighbors)[0]
neighbour_right = self._find_neighbors(x_right, self.neighbors)[1]
neighbour_left = self.neighbors.get(x_left, (None, None))[0]
neighbour_right = self.neighbors.get(x_right, (None, None))[1]
self._update_interpolated_loss_in_interval(neighbour_left, x_left)
self._update_interpolated_loss_in_interval(x_right, neighbour_right)
@@ -313,8 +311,6 @@ class Learner1D(BaseLearner):
def _find_neighbors(x, neighbors):
if x in neighbors:
return neighbors[x]
if x is None:
return None, None
pos = neighbors.bisect_left(x)
keys = neighbors.keys()
x_left = keys[pos - 1] if pos != 0 else None
Loading