From b00f266f4d1fdf796213818662bacfaecf99c50a Mon Sep 17 00:00:00 2001
From: Bas Nijholt <basnijholt@gmail.com>
Date: Thu, 22 Nov 2018 12:36:22 +0100
Subject: [PATCH] fix several documentation mistakes

* add 'curvature_loss_function' to 'tutorial.custom_loss.rst'
* fix header styling
* fix doc-string example formatting
---
 adaptive/learner/learner1D.py                 | 2 +-
 docs/source/tutorial/tutorial.LearnerND.rst   | 2 +-
 docs/source/tutorial/tutorial.custom_loss.rst | 3 +++
 3 files changed, 5 insertions(+), 2 deletions(-)
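
A minimal sketch of how the `curvature_loss_function` factory documented in the
`tutorial.custom_loss.rst` hunk below can be used with a `Learner1D` (illustrative
only; it assumes the documented `loss_per_interval` keyword and a no-argument call
to the factory):

    import adaptive
    from adaptive.learner.learner1D import curvature_loss_function

    def f(x):
        return x + 0.1 * x**3

    # 'curvature_loss_function' is a factory: calling it returns the actual
    # loss function, optionally configured with settings.
    loss = curvature_loss_function()

    learner = adaptive.Learner1D(f, bounds=(-1, 1), loss_per_interval=loss)
    adaptive.runner.simple(learner, goal=lambda l: l.loss() < 0.01)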

diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py
index 74b554cc..4f705c90 100644
--- a/adaptive/learner/learner1D.py
+++ b/adaptive/learner/learner1D.py
@@ -32,7 +32,7 @@ def uses_nth_neighbors(n):
     The next function is a part of the `curvature_loss_function` function.
 
     >>> @uses_nth_neighbors(1)
-    ...def triangle_loss(xs, ys):
+    ... def triangle_loss(xs, ys):
     ...    xs = [x for x in xs if x is not None]
     ...    ys = [y for y in ys if y is not None]
     ...
diff --git a/docs/source/tutorial/tutorial.LearnerND.rst b/docs/source/tutorial/tutorial.LearnerND.rst
index 6d160b51..aff2a029 100644
--- a/docs/source/tutorial/tutorial.LearnerND.rst
+++ b/docs/source/tutorial/tutorial.LearnerND.rst
@@ -92,7 +92,7 @@ lines. However, as always, when you sample more points the graph will
 become gradually smoother.
 
 Using any convex shape as domain
---------------------------------
+................................
 
 Suppose you do not simply want to sample your function on a square (in 2D) or in
 a cube (in 3D). The LearnerND supports using a `scipy.spatial.ConvexHull` as
diff --git a/docs/source/tutorial/tutorial.custom_loss.rst b/docs/source/tutorial/tutorial.custom_loss.rst
index 9e9c0f37..cee34ba8 100644
--- a/docs/source/tutorial/tutorial.custom_loss.rst
+++ b/docs/source/tutorial/tutorial.custom_loss.rst
@@ -46,11 +46,14 @@ tl;dr, one can use the following *loss functions* that
 
 + `adaptive.learner.learner1D.default_loss`
 + `adaptive.learner.learner1D.uniform_loss`
++ `adaptive.learner.learner1D.curvature_loss_function`
 + `adaptive.learner.learner2D.default_loss`
 + `adaptive.learner.learner2D.uniform_loss`
 + `adaptive.learner.learner2D.minimize_triangle_surface_loss`
 + `adaptive.learner.learner2D.resolution_loss_function`
 
+Whenever the name of a loss function ends in `_function`, it is a factory function
+that, when called, returns a loss function configured with the given settings.
 
 Uniform sampling
 ~~~~~~~~~~~~~~~~
-- 
GitLab