Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • 120--learnernd-curvature
  • 133-use-a-itemsorteddict-for-the-loss-in-the-learnernd
  • 134-learner1d-load-throws-exception-when-file-is-empty
  • 74--add-anisotropicity-to-learnerND
  • AverageLearner2D
  • bugfix/suppress
  • ci_benchmarks
  • cython
  • function_in_runner
  • make_notebook_with_content
  • master
  • no_overlay_plotting
  • private_methods_learnernd
  • refactor/triangulating-learner
  • renorm_2d
  • rst_readme
  • rtol_integrator
  • stable-0.7
  • v0.1.0
  • v0.2.0
  • v0.2.0-dev
  • v0.2.1
  • v0.3.0
  • v0.3.0-dev
  • v0.4.0
  • v0.4.0-dev
  • v0.4.1
  • v0.5.0
  • v0.5.0-dev
  • v0.6.0
  • v0.7.0
  • v0.7.0-dev
  • v0.7.2
  • v0.8.0-dev
34 results

Target

Select target project
No results found
Select Git revision
  • 120--learnernd-curvature
  • 133-use-a-itemsorteddict-for-the-loss-in-the-learnernd
  • 134-learner1d-load-throws-exception-when-file-is-empty
  • 74--add-anisotropicity-to-learnerND
  • AverageLearner2D
  • bugfix/suppress
  • ci_benchmarks
  • cython
  • function_in_runner
  • make_notebook_with_content
  • master
  • no_overlay_plotting
  • private_methods_learnernd
  • refactor/triangulating-learner
  • renorm_2d
  • rst_readme
  • rtol_integrator
  • stable-0.7
  • v0.1.0
  • v0.2.0
  • v0.2.0-dev
  • v0.2.1
  • v0.3.0
  • v0.3.0-dev
  • v0.4.0
  • v0.4.0-dev
  • v0.4.1
  • v0.5.0
  • v0.5.0-dev
  • v0.6.0
  • v0.7.0
  • v0.7.0-dev
  • v0.7.2
  • v0.8.0-dev
34 results
Show changes
Commits on Source (3)
......@@ -222,6 +222,7 @@ class LearnerND(BaseLearner):
try:
self._tri = Triangulation(self.points)
self.update_losses(set(), self._tri.simplices)
return self._tri
except ValueError:
# A ValueError is raised if we do not have enough points or
......@@ -229,8 +230,6 @@ class LearnerND(BaseLearner):
# a valid triangulation
return None
# XXX: also compute losses of initial simplex
@property
def values(self):
return np.array(list(self.data.values()), dtype=float)
......@@ -359,8 +358,8 @@ class LearnerND(BaseLearner):
# Could not find a simplex, this code should never be reached
assert self.tri is not None
raise AssertionError(
"Could not find a simplex to. Yet there should always be a simplex "
"available if LearnerND.tri() is not None"
"Could not find a simplex to subdivide. Yet there should always be "
"a simplex available if LearnerND.tri() is not None."
)
def _ask_best_point(self):
......
......@@ -353,8 +353,7 @@ def test_learner_performance_is_invariant_under_scaling(learner_type, f, learner
assert abs(learner.loss() - control.loss()) / learner.loss() < 1e-11
# XXX: The LearnerND shouldn't fail, see https://gitlab.kwant-project.org/qt/adaptive/issues/105
@run_with(Learner1D, Learner2D, xfail(LearnerND), AverageLearner)
@run_with(Learner1D, Learner2D, LearnerND, AverageLearner)
def test_balancing_learner(learner_type, f, learner_kwargs):
"""Test if the BalancingLearner works with the different types of learners."""
learners = [learner_type(generate_random_parametrization(f), **learner_kwargs)
......