diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py
index c0fa73b606f873e42b4cb842be174ad5484b6b25..62c112fc80ad84e240696d42a2f816cb9fe8f0f5 100644
--- a/adaptive/learner/learner1D.py
+++ b/adaptive/learner/learner1D.py
@@ -15,7 +15,7 @@ from ..notebook_integration import ensure_holoviews
 from ..utils import cache_latest
 
 
-def uniform_loss(interval, scale, function_values, neighbors):
+def uniform_loss(interval, scale, data, neighbors):
     """Loss function that samples the domain uniformly.
 
     Works with `~adaptive.Learner1D` only.
@@ -36,7 +36,7 @@ def uniform_loss(interval, scale, function_values, neighbors):
     return dx
 
 
-def default_loss(interval, scale, function_values, neighbors):
+def default_loss(interval, scale, data, neighbors):
     """Calculate loss on a single interval.
 
     Currently returns the rescaled length of the interval. If one of the
@@ -44,7 +44,7 @@ def default_loss(interval, scale, function_values, neighbors):
     never touched. This behavior should be improved later.
     """
     x_left, x_right = interval
-    y_right, y_left = function_values[x_right], function_values[x_left]
+    y_right, y_left = data[x_right], data[x_left]
     x_scale, y_scale = scale
     dx = (x_right - x_left) / x_scale
     if y_scale == 0:
@@ -70,7 +70,7 @@ def _loss_of_multi_interval(xs, ys):
     return sum(vol(pts[i:i+3]) for i in range(N)) / N
 
 
-def triangle_loss(interval, scale, function_values, neighbors):
+def triangle_loss(interval, scale, data, neighbors):
     x_left, x_right = interval
     xs = [neighbors[x_left][0], x_left, x_right, neighbors[x_right][1]]
     xs = [x for x in xs if x is not None]
@@ -79,15 +79,15 @@ def triangle_loss(interval, scale, function_values, neighbors):
         return (x_right - x_left) / scale[0]
     else:
         y_scale = scale[1] or 1
-        ys_scaled = [function_values[x] / y_scale for x in xs]
+        ys_scaled = [data[x] / y_scale for x in xs]
         xs_scaled = [x / scale[0] for x in xs]
         return _loss_of_multi_interval(xs_scaled, ys_scaled)
 
 
 def get_curvature_loss(area_factor=1, euclid_factor=0.02, horizontal_factor=0.02):
-    def curvature_loss(interval, scale, function_values, neighbors):
-        triangle_loss_ = triangle_loss(interval, scale, function_values, neighbors)
-        default_loss_ = default_loss(interval, scale, function_values, neighbors)
+    def curvature_loss(interval, scale, data, neighbors):
+        triangle_loss_ = triangle_loss(interval, scale, data, neighbors)
+        default_loss_ = default_loss(interval, scale, data, neighbors)
         dx = (interval[1] - interval[0]) / scale[0]
         return (area_factor * (triangle_loss_**0.5)
                 + euclid_factor * default_loss_
@@ -163,11 +163,13 @@ class Learner1D(BaseLearner):
     scale : (float, float)
         The x and y scale over all the intervals, useful for rescaling the
         interval loss.
-    function_values : dict(float → float)
+    data : dict(float → float)
         A map containing evaluated function values. It is guaranteed
         to have values for both of the points in 'interval'.
     neighbors : dict(float → (float, float))
         A map containing points as keys to its neighbors as a tuple.
+        At the leftmost boundary ``x_left`` and the rightmost boundary
+        ``x_right`` it has ``x_left: (None, float)`` and ``x_right: (float, None)``.
     """
 
     def __init__(self, function, bounds, loss_per_interval=None, nn_neighbors=0):
diff --git a/docs/source/tutorial/tutorial.custom_loss.rst b/docs/source/tutorial/tutorial.custom_loss.rst
index b1cca2951424a751d522f945ec5cfdd86a58e5f3..3c76a9df3a6981ec9e106347c32b288e5f9d6122 100644
--- a/docs/source/tutorial/tutorial.custom_loss.rst
+++ b/docs/source/tutorial/tutorial.custom_loss.rst
@@ -60,8 +60,8 @@ simple (but naive) strategy is to *uniformly* sample the domain:
 
 .. jupyter-execute::
 
-    def uniform_sampling_1d(interval, scale, function_values):
-        # Note that we never use 'function_values'; the loss is just the size of the subdomain
+    def uniform_sampling_1d(interval, scale, data):
+        # Note that we never use 'data'; the loss is just the size of the subdomain
         x_left, x_right = interval
         x_scale, _ = scale
         dx = (x_right - x_left) / x_scale
diff --git a/learner.ipynb b/learner.ipynb
index 3a70b55eae4047e32a74c21a0cbe2f849eec3434..8bb30ec1a0a717b82fb637875c66738903744344 100644
--- a/learner.ipynb
+++ b/learner.ipynb
@@ -559,8 +559,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "def uniform_sampling_1d(interval, scale, function_values):\n",
-    "    # Note that we never use 'function_values'; the loss is just the size of the subdomain\n",
+    "def uniform_sampling_1d(interval, scale, data):\n",
+    "    # Note that we never use 'data'; the loss is just the size of the subdomain\n",
     "    x_left, x_right = interval\n",
     "    x_scale, _ = scale\n",
     "    dx = (x_right - x_left) / x_scale\n",