
test all the different loss functions in each test

Merged: Bas Nijholt requested to merge test_loss_functions into master
2 files changed (+15 −12)
@@ -228,6 +228,11 @@ class Learner1D(BaseLearner):
         self.losses = {}
         self.losses_combined = {}
+        # When the scale changes by a factor 2, the losses are
+        # recomputed. This is tunable such that we can test
+        # the learners behavior in the tests.
+        self._recompute_losses_factor = 2
         self.data = {}
         self.pending_points = set()
@@ -447,7 +452,7 @@ class Learner1D(BaseLearner):
         self._update_losses(x, real=True)
         # If the scale has increased enough, recompute all losses.
-        if self._scale[1] > 2 * self._oldscale[1]:
+        if self._scale[1] > self._recompute_losses_factor * self._oldscale[1]:
             for interval in self.losses:
                 self._update_interpolated_loss_in_interval(*interval)
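
The point of the new _recompute_losses_factor attribute is that tests can lower the threshold at which all losses are recomputed, so the recompute branch is exercised for every loss function. Below is a minimal sketch of how a test might loop over loss functions; it assumes adaptive's public Learner1D API (ask/tell, npoints, losses) and the default_loss and uniform_loss functions from adaptive.learner.learner1D. The tests actually changed in this merge request may be structured differently.

# Hedged sketch of a test that runs every loss function through the
# same scenario; not the exact test added in this merge request.
import numpy as np

from adaptive import Learner1D
from adaptive.learner.learner1D import default_loss, uniform_loss

LOSS_FUNCTIONS = [default_loss, uniform_loss]


def run_with_loss(loss_per_interval, f=lambda x: np.tanh(20 * x), npoints=100):
    learner = Learner1D(f, bounds=(-1, 1), loss_per_interval=loss_per_interval)
    # Hypothetical use of the new attribute: lower the factor so the
    # "recompute all losses" branch triggers early during the test.
    learner._recompute_losses_factor = 1.1
    while learner.npoints < npoints:
        points, _ = learner.ask(1)
        for x in points:
            learner.tell(x, f(x))
    return learner


def test_all_loss_functions():
    for loss in LOSS_FUNCTIONS:
        learner = run_with_loss(loss)
        # Every stored loss should be finite and non-negative.
        assert all(np.isfinite(v) and v >= 0 for v in learner.losses.values())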