From a17c9212c1708ec4871e02ed81039d2ebb49ccb2 Mon Sep 17 00:00:00 2001
From: Bas Nijholt <basnijholt@gmail.com>
Date: Mon, 8 Oct 2018 17:08:34 +0200
Subject: [PATCH] add 'save' and 'load' to the learners and periodic saving to
 the Runner

---
 adaptive/learner/average_learner.py           |   6 ++
 adaptive/learner/balancing_learner.py         |  73 +++++++++++++
 adaptive/learner/base_learner.py              |  79 ++++++++++++++
 adaptive/learner/data_saver.py                |  22 ++++
 adaptive/learner/integrator_learner.py        |  27 +++++
 adaptive/learner/learner1D.py                 |   6 ++
 adaptive/learner/learner2D.py                 |  11 ++
 adaptive/learner/learnerND.py                 |   6 ++
 adaptive/runner.py                            |  26 +++++
 adaptive/tests/test_learners.py               |  86 ++++++++++++++-
 adaptive/utils.py                             |  30 +++++-
 .../tutorial/tutorial.advanced-topics.rst     | 101 +++++++++++++++---
 learner.ipynb                                 | 100 +++++++++++++++++
 13 files changed, 553 insertions(+), 20 deletions(-)

diff --git a/adaptive/learner/average_learner.py b/adaptive/learner/average_learner.py
index 0eb69c3f..312101bf 100644
--- a/adaptive/learner/average_learner.py
+++ b/adaptive/learner/average_learner.py
@@ -125,3 +125,9 @@ class AverageLearner(BaseLearner):
         num_bins = int(max(5, sqrt(self.npoints)))
         vals = hv.Points(vals)
         return hv.operation.histogram(vals, num_bins=num_bins, dimension=1)
+
+    def _get_data(self):
+        return (self.data, self.npoints, self.sum_f, self.sum_f_sq)
+
+    def _set_data(self, data):
+        self.data, self.npoints, self.sum_f, self.sum_f_sq = data
diff --git a/adaptive/learner/balancing_learner.py b/adaptive/learner/balancing_learner.py
index e12f2307..764758ba 100644
--- a/adaptive/learner/balancing_learner.py
+++ b/adaptive/learner/balancing_learner.py
@@ -3,6 +3,7 @@ from collections import defaultdict
 from contextlib import suppress
 from functools import partial
 from operator import itemgetter
+import os.path
 
 import numpy as np
 
@@ -302,3 +303,75 @@ class BalancingLearner(BaseLearner):
             learner = learner_type(function=partial(f, **combo), **learner_kwargs)
             learners.append(learner)
         return cls(learners, cdims=arguments)
+
+    def save(self, folder, compress=True):
+        """Save the data of the child learners into pickle files
+        in a directory.
+
+        Parameters
+        ----------
+        folder : str
+            Directory in which the learners' data will be saved.
+        compress : bool, default True
+            Compress the data upon saving using 'gzip'. When saving
+            using compression, one must load it with compression too.
+
+        Notes
+        -----
+        The child learners need to have a 'fname' attribute in order to use
+        this method.
+
+        Example
+        -------
+        >>> def combo_fname(combo):
+        ...     return '__'.join(f'{k}_{v}' for k, v in combo.items()) + '.p'
+        ...
+        >>> def f(x, a, b):
+        ...     return a * x**2 + b
+        ...
+        >>> learners = []
+        >>> for combo in adaptive.utils.named_product(a=[1, 2], b=[1]):
+        ...     l = Learner1D(functools.partial(f, **combo), bounds=(-1, 1))
+        ...     l.fname = combo_fname(combo)  # 'a_1__b_1.p', 'a_2__b_1.p' etc.
+        ...     learners.append(l)
+        >>> learner = BalancingLearner(learners)
+        >>> # Run the learner
+        >>> runner = adaptive.Runner(learner)
+        >>> # Then save
+        >>> learner.save('data_folder')  # use 'load' in the same way
+        """
+        if len(self.learners) != len(set(l.fname for l in self.learners)):
+            raise RuntimeError("The 'learner.fname's are not all unique.")
+
+        for l in self.learners:
+            l.save(os.path.join(folder, l.fname), compress=compress)
+
+    def load(self, folder, compress=True):
+        """Load the data of the child learners from pickle files
+        in a directory.
+
+        Parameters
+        ----------
+        folder : str
+            Directory from which the learners' data will be loaded.
+        compress : bool, default True
+            If the data is compressed when saved, one must load it
+            with compression too.
+
+        Notes
+        -----
+        The child learners need to have a 'fname' attribute in order to use
+        this method.
+
+        Example
+        -------
+        See the example in the 'BalancingLearner.save' doc-string.
+        """
+        for l in self.learners:
+            l.load(os.path.join(folder, l.fname), compress=compress)
+
+    def _get_data(self):
+        return [l._get_data() for l in self.learners]
+
+    def _set_data(self, data):
+        for l, _data in zip(self.learners, data):
+            l._set_data(_data)
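
A minimal usage sketch of the per-learner file layout this enables (the
function 'h', the bounds, and the goal are illustrative assumptions, not part
of the patch):

    from functools import partial

    import adaptive

    def h(x, offset):  # illustrative function
        return x + offset

    learners = []
    for combo in adaptive.utils.named_product(offset=[0.0, 0.5]):
        l = adaptive.Learner1D(partial(h, **combo), bounds=(-1, 1))
        l.fname = '__'.join(f'{k}_{v}' for k, v in combo.items()) + '.p'
        learners.append(l)

    learner = adaptive.BalancingLearner(learners)
    adaptive.runner.simple(learner, lambda l: all(c.npoints > 20 for c in l.learners))
    learner.save('data_folder')  # writes 'data_folder/offset_0.0.p' etc.
    learner.load('data_folder')  # restores from the same per-learner files
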
diff --git a/adaptive/learner/base_learner.py b/adaptive/learner/base_learner.py
index 37c68571..b33cc018 100644
--- a/adaptive/learner/base_learner.py
+++ b/adaptive/learner/base_learner.py
@@ -1,7 +1,10 @@
 # -*- coding: utf-8 -*-
 import abc
+from contextlib import suppress
 from copy import deepcopy
 
+from ..utils import save, load
+
 
 class BaseLearner(metaclass=abc.ABCMeta):
     """Base class for algorithms for learning a function 'f: X → Y'.
@@ -83,8 +86,84 @@ class BaseLearner(metaclass=abc.ABCMeta):
         """
         pass
 
+    @abc.abstractmethod
+    def _get_data(self):
+        pass
+
+    @abc.abstractmethod
+    def _set_data(self, data):
+        pass
+
+    def copy_from(self, other):
+        """Copy over the data from another learner.
+
+        Parameters
+        ----------
+        other : BaseLearner object
+            The learner from which the data is copied.
+        """
+        self._set_data(other._get_data())
+
+    def save(self, fname=None, compress=True):
+        """Save the data of the learner into a pickle file.
+
+        Parameters
+        ----------
+        fname : str, optional
+            The filename of the learner's pickle data file. If None use
+            the 'fname' attribute, like 'learner.fname = "example.p"'.
+        compress : bool, default True
+            Compress the data upon saving using 'gzip'. When saving
+            using compression, one must load it with compression too.
+
+        Notes
+        -----
+        There are two ways of naming the files:
+        1. Using the 'fname' argument, like 'learner.save(fname="example.p")'.
+        2. Setting the 'fname' attribute, like
+           'learner.fname = "data/example.p"' and then 'learner.save()'.
+        """
+        fname = fname or self.fname
+        data = self._get_data()
+        save(fname, data, compress)
+
+    def load(self, fname=None, compress=True):
+        """Load the data of a learner from a pickle file.
+
+        Parameters
+        ----------
+        fname : str, optional
+            The filename of the saved learner's pickled data file.
+            If None use the 'fname' attribute, like
+            'learner.fname = "example.p"'.
+        compress : bool, default True
+            If the data is compressed when saved, one must load it
+            with compression too.
+
+        Notes
+        -----
+        See the notes in the 'BaseLearner.save' doc-string.
+        """
+        fname = fname or self.fname
+        with suppress(FileNotFoundError, EOFError):
+            data = load(fname, compress)
+            self._set_data(data)
+
     def __getstate__(self):
         return deepcopy(self.__dict__)
 
     def __setstate__(self, state):
         self.__dict__ = state
+
+    @property
+    def fname(self):
+        # This is a property because then it is also available in the 'DataSaver'
+        try:
+            return self._fname
+        except AttributeError:
+            raise AttributeError("Set 'learner.fname' or use the 'fname'"
+                " argument when using 'learner.save' or 'learner.load'.")
+
+    @fname.setter
+    def fname(self, fname):
+        self._fname = fname
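
For illustration, the two naming styles and 'copy_from' in use (the file
names, function, and goal are example choices):

    import adaptive

    def g(x):  # illustrative function
        return x**2

    learner = adaptive.Learner1D(g, bounds=(-1, 1))
    adaptive.runner.simple(learner, lambda l: l.npoints > 20)

    learner.save(fname='data/example.p')  # 1. explicit 'fname' argument

    learner.fname = 'data/example.p'      # 2. set the attribute once ...
    learner.save()                        # ... then call without arguments

    control = adaptive.Learner1D(g, bounds=(-1, 1))
    control.load('data/example.p')  # or set 'control.fname' and call 'load()'
    control.copy_from(learner)      # or copy the data in memory, without disk
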
diff --git a/adaptive/learner/data_saver.py b/adaptive/learner/data_saver.py
index b07cb952..832c1a9e 100644
--- a/adaptive/learner/data_saver.py
+++ b/adaptive/learner/data_saver.py
@@ -2,6 +2,9 @@
 from collections import OrderedDict
 import functools
 
+from .base_learner import BaseLearner
+from ..utils import copy_docstring_from
+
 
 class DataSaver:
     """Save extra data associated with the values that need to be learned.
@@ -40,6 +43,25 @@ class DataSaver:
     def tell_pending(self, x):
         self.learner.tell_pending(x)
 
+    def _get_data(self):
+        return self.learner._get_data(), self.extra_data
+
+    def _set_data(self, data):
+        learner_data, self.extra_data = data
+        self.learner._set_data(learner_data)
+
+    @copy_docstring_from(BaseLearner.save)
+    def save(self, fname=None, compress=True):
+        # We reuse 'BaseLearner.save' here because the 'DataSaver' is not a
+        # subclass of the 'BaseLearner'.
+        BaseLearner.save(self, fname, compress)
+
+    @copy_docstring_from(BaseLearner.load)
+    def load(self, fname=None, compress=True):
+        # We reuse 'BaseLearner.load' here because the 'DataSaver' is not a
+        # subclass of the 'BaseLearner'.
+        BaseLearner.load(self, fname, compress)
+
 
 def _ds(learner_type, arg_picker, *args, **kwargs):
     args = args[2:]  # functools.partial passes the first 2 arguments in 'args'!
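
A sketch of the round trip through 'DataSaver', where both the wrapped
learner's data and 'extra_data' survive (the wrapped function is an
assumption for the example):

    import operator
    import random

    import adaptive

    def f(x):  # illustrative: the value to learn plus extra data to save
        return {'y': x**2, 't': random.random()}

    arg_picker = operator.itemgetter('y')
    learner = adaptive.DataSaver(adaptive.Learner1D(f, bounds=(-1, 1)), arg_picker)
    adaptive.runner.simple(learner, lambda l: l.npoints > 20)

    learner.save('data/with_extra.p')  # pickles (learner data, extra_data)

    control = adaptive.DataSaver(adaptive.Learner1D(f, bounds=(-1, 1)), arg_picker)
    control.load('data/with_extra.p')
    assert control.extra_data == learner.extra_data
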
diff --git a/adaptive/learner/integrator_learner.py b/adaptive/learner/integrator_learner.py
index 6d39d5d5..931d4531 100644
--- a/adaptive/learner/integrator_learner.py
+++ b/adaptive/learner/integrator_learner.py
@@ -525,3 +525,30 @@ class IntegratorLearner(BaseLearner):
         xs, ys = zip(*[(x, y) for ival in ivals
                        for x, y in sorted(ival.done_points.items())])
         return hv.Path((xs, ys))
+
+    def _get_data(self):
+        # Change the defaultdict of SortedSets to a normal dict of sets.
+        x_mapping = {k: set(v) for k, v in self.x_mapping.items()}
+
+        return (self.priority_split,
+                self.done_points,
+                self.pending_points,
+                self._stack,
+                x_mapping,
+                self.ivals,
+                self.first_ival)
+
+    def _set_data(self, data):
+        self.priority_split, self.done_points, self.pending_points, \
+            self._stack, x_mapping, self.ivals, self.first_ival = data
+
+        # Add the pending_points to the _stack such that they are evaluated again
+        for x in self.pending_points:
+            if x not in self._stack:
+                self._stack.append(x)
+
+        # x_mapping is a defaultdict with a lambda factory, which cannot
+        # be pickled, so we recreate it here
+        self.x_mapping = defaultdict(lambda: SortedSet([], key=attrgetter('rdepth')))
+        for k, _set in x_mapping.items():
+            self.x_mapping[k].update(_set)
diff --git a/adaptive/learner/learner1D.py b/adaptive/learner/learner1D.py
index 20509475..343a9def 100644
--- a/adaptive/learner/learner1D.py
+++ b/adaptive/learner/learner1D.py
@@ -485,3 +485,9 @@ class Learner1D(BaseLearner):
         self.pending_points = set()
         self.losses_combined = deepcopy(self.losses)
         self.neighbors_combined = deepcopy(self.neighbors)
+
+    def _get_data(self):
+        return self.data
+
+    def _set_data(self, data):
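+        # Feeding the points through 'tell_many' rebuilds the losses and neighbors.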
+        self.tell_many(*zip(*data.items()))
diff --git a/adaptive/learner/learner2D.py b/adaptive/learner/learner2D.py
index f44419ee..bbc3f644 100644
--- a/adaptive/learner/learner2D.py
+++ b/adaptive/learner/learner2D.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 from collections import OrderedDict
+from copy import copy
 import itertools
 from math import sqrt
 
@@ -522,3 +523,13 @@ class Learner2D(BaseLearner):
         no_hover = dict(plot=dict(inspection_policy=None, tools=[]))
 
         return im.opts(style=im_opts) * tris.opts(style=tri_opts, **no_hover)
+
+    def _get_data(self):
+        return self.data
+
+    def _set_data(self, data):
+        self.data = data
+        # Remove points from stack if they already exist
+        for point in copy(self._stack):
+            if point in self.data:
+                self._stack.pop(point)
diff --git a/adaptive/learner/learnerND.py b/adaptive/learner/learnerND.py
index f2dc8e6e..274ae16f 100644
--- a/adaptive/learner/learnerND.py
+++ b/adaptive/learner/learnerND.py
@@ -572,3 +572,9 @@ class LearnerND(BaseLearner):
             return im.opts(style=dict(cmap='viridis'))
         else:
             raise ValueError("Only 1 or 2-dimensional plots can be generated.")
+
+    def _get_data(self):
+        return self.data
+
+    def _set_data(self, data):
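+        # Feeding the points through 'tell_many' rebuilds the internal
+        # state, e.g. the triangulation.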
+        self.tell_many(*zip(*data.items()))
diff --git a/adaptive/runner.py b/adaptive/runner.py
index f1114d9e..ce2b61ad 100644
--- a/adaptive/runner.py
+++ b/adaptive/runner.py
@@ -449,6 +449,7 @@ class AsyncRunner(BaseRunner):
                                              self.function)
 
         self.task = self.ioloop.create_task(self._run())
+        self.saving_task = None
         if in_ipynb() and not self.ioloop.is_running():
             warnings.warn("The runner has been scheduled, but the asyncio "
                           "event loop is not running! If you are "
@@ -541,6 +542,31 @@ class AsyncRunner(BaseRunner):
             end_time = time.time()
         return end_time - self.start_time
 
+    def start_periodic_saving(self, save_kwargs, interval):
+        """Periodically save the learner's data.
+
+        Parameters
+        ----------
+        save_kwargs : dict
+            Keyword arguments for 'learner.save(**save_kwargs)'.
+        interval : int
+            Number of seconds between saving the learner.
+
+        Example
+        -------
+        >>> runner = Runner(learner)
+        >>> runner.start_periodic_saving(
+        ...     save_kwargs=dict(fname='data/test.pickle'),
+        ...     interval=600)
+        """
+        async def _saver(save_kwargs=save_kwargs, interval=interval):
+            while self.status() == 'running':
+                self.learner.save(**save_kwargs)
+                await asyncio.sleep(interval)
+            self.learner.save(**save_kwargs)  # one last time
+        self.saving_task = self.ioloop.create_task(_saver())
+        return self.saving_task
+
 
 # Default runner
 Runner = AsyncRunner
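
A sketch of periodic saving in a plain script (the function, file name, and
interval are illustrative; blocking on the tasks is one way to run to
completion outside a notebook):

    import asyncio

    import adaptive

    def slow(x):  # illustrative stand-in for an expensive function
        from time import sleep
        sleep(0.01)
        return x

    learner = adaptive.Learner1D(slow, bounds=(0, 1))
    runner = adaptive.Runner(learner, goal=lambda l: l.npoints > 100)
    saving_task = runner.start_periodic_saving(
        save_kwargs=dict(fname='data/periodic.p'), interval=5)

    # Block until both the runner and the saver finish; the saver writes
    # one final snapshot after the runner stops.
    runner.ioloop.run_until_complete(asyncio.gather(runner.task, saving_task))
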
diff --git a/adaptive/tests/test_learners.py b/adaptive/tests/test_learners.py
index a8010894..9fb3b0d8 100644
--- a/adaptive/tests/test_learners.py
+++ b/adaptive/tests/test_learners.py
@@ -1,17 +1,22 @@
 # -*- coding: utf-8 -*-
 
 import collections
+import functools as ft
 import inspect
 import itertools as it
-import functools as ft
-import random
 import math
-import numpy as np
-import scipy.spatial
+import operator
+import os
+import random
+import shutil
+import tempfile
 
+import numpy as np
 import pytest
+import scipy.spatial
 
-from ..learner import AverageLearner, BalancingLearner, Learner1D, Learner2D, LearnerND
+from ..learner import (AverageLearner, BalancingLearner, DataSaver,
+    IntegratorLearner, Learner1D, Learner2D, LearnerND, SKOptLearner)
 from ..runner import simple
 
 
@@ -386,6 +391,77 @@ def test_balancing_learner(learner_type, f, learner_kwargs):
     assert all(l.npoints > 10 for l in learner.learners), [l.npoints for l in learner.learners]
 
 
+@run_with(Learner1D, Learner2D, LearnerND, AverageLearner, SKOptLearner,
+    IntegratorLearner)
+def test_saving(learner_type, f, learner_kwargs):
+    f = generate_random_parametrization(f)
+    learner = learner_type(f, **learner_kwargs)
+    control = learner_type(f, **learner_kwargs)
+    simple(learner, lambda l: l.npoints > 100)
+    fd, path = tempfile.mkstemp()
+    os.close(fd)  # close the file descriptor; 'learner.save' reopens the path
+    try:
+        learner.save(path)
+        control.load(path)
+        if learner_type is not Learner1D:
+            # Because different scales result in different losses
+            np.testing.assert_almost_equal(learner.loss(), control.loss())
+
+        # Check that the control learner is still runnable
+        simple(control, lambda l: l.npoints > 200)
+    finally:
+        os.remove(path)
+
+
+@run_with(Learner1D, Learner2D, LearnerND, AverageLearner, SKOptLearner,
+    IntegratorLearner)
+def test_saving_of_balancing_learner(learner_type, f, learner_kwargs):
+    f = generate_random_parametrization(f)
+    learner = BalancingLearner([learner_type(f, **learner_kwargs)])
+    control = BalancingLearner([learner_type(f, **learner_kwargs)])
+
+    # set fnames
+    learner.learners[0].fname = 'test'
+    control.learners[0].fname = 'test'
+
+    simple(learner, lambda l: l.learners[0].npoints > 100)
+    folder = tempfile.mkdtemp()
+    try:
+        learner.save(folder=folder)
+        control.load(folder=folder)
+        if learner_type is not Learner1D:
+            # Because different scales result in different losses
+            np.testing.assert_almost_equal(learner.loss(), control.loss())
+
+        # Check that the control learner is still runnable
+        simple(control, lambda l: l.learners[0].npoints > 200)
+    finally:
+        shutil.rmtree(folder)
+
+
+@run_with(Learner1D, Learner2D, LearnerND, AverageLearner, SKOptLearner,
+    IntegratorLearner)
+def test_saving_with_datasaver(learner_type, f, learner_kwargs):
+    f = generate_random_parametrization(f)
+    g = lambda x: {'y': f(x), 't': random.random()}
+    arg_picker = operator.itemgetter('y')
+    learner = DataSaver(learner_type(g, **learner_kwargs), arg_picker)
+    control = DataSaver(learner_type(g, **learner_kwargs), arg_picker)
+    simple(learner, lambda l: l.npoints > 100)
+    fd, path = tempfile.mkstemp()
+    os.close(fd)  # close the file descriptor; 'learner.save' reopens the path
+    try:
+        learner.save(path)
+        control.load(path)
+        if learner_type is not Learner1D:
+            # Because different scales result in different losses
+            np.testing.assert_almost_equal(learner.loss(), control.loss())
+        assert learner.extra_data == control.extra_data
+
+        # Check that the control learner is still runnable
+        simple(control, lambda l: l.npoints > 200)
+    finally:
+        os.remove(path)
+
+
 @pytest.mark.xfail
 @run_with(Learner1D, Learner2D, LearnerND)
 def test_convergence_for_arbitrary_ordering(learner_type, f, learner_kwargs):
diff --git a/adaptive/utils.py b/adaptive/utils.py
index 8895351c..041e0681 100644
--- a/adaptive/utils.py
+++ b/adaptive/utils.py
@@ -1,7 +1,10 @@
 # -*- coding: utf-8 -*-
 from contextlib import contextmanager
-from functools import wraps
+import functools
+import gzip
 from itertools import product
+import os
+import pickle
 import time
 
 
@@ -30,7 +33,7 @@ def restore(*learners):
 def cache_latest(f):
     """Cache the latest return value of the function and add it
     as 'self._cache[f.__name__]'."""
-    @wraps(f)
+    @functools.wraps(f)
     def wrapper(*args, **kwargs):
         self = args[0]
         if not hasattr(self, '_cache'):
@@ -38,3 +41,26 @@ def cache_latest(f):
         self._cache[f.__name__] = f(*args, **kwargs)
         return self._cache[f.__name__]
     return wrapper
+
+
+def save(fname, data, compress=True):
+    fname = os.path.expanduser(fname)
+    dirname = os.path.dirname(fname)
+    if dirname:
+        os.makedirs(dirname, exist_ok=True)
+    _open = gzip.open if compress else open
+    with _open(fname, 'wb') as f:
+        pickle.dump(data, f, protocol=pickle.HIGHEST_PROTOCOL)
+
+
+def load(fname, compress=True):
+    fname = os.path.expanduser(fname)
+    _open = gzip.open if compress else open
+    with _open(fname, 'rb') as f:
+        return pickle.load(f)
+
+
+def copy_docstring_from(other):
+    def decorator(method):
+        return functools.wraps(other)(method)
+    return decorator
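
A small sketch of these helpers (the file names are illustrative):

    from adaptive.utils import copy_docstring_from, load, save

    data = {'x': [1, 2, 3], 'y': (4.0, 5.0)}
    save('data/blob.p', data)  # gzip-compressed pickle by default
    assert load('data/blob.p') == data

    save('data/raw.p', data, compress=False)  # plain pickle
    assert load('data/raw.p', compress=False) == data

    def documented():
        """Original docstring."""

    @copy_docstring_from(documented)
    def other():
        pass

    assert other.__doc__ == 'Original docstring.'
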
diff --git a/docs/source/tutorial/tutorial.advanced-topics.rst b/docs/source/tutorial/tutorial.advanced-topics.rst
index ae0eaceb..f84b8812 100644
--- a/docs/source/tutorial/tutorial.advanced-topics.rst
+++ b/docs/source/tutorial/tutorial.advanced-topics.rst
@@ -17,21 +17,98 @@ Advanced Topics
     import adaptive
     adaptive.notebook_extension()
 
+    import asyncio
     from functools import partial
     import random
 
     offset = random.uniform(-0.5, 0.5)
 
-    def f(x, offset=offset, wait=True):
-        from time import sleep
-        from random import random
-
+    def f(x, offset=offset):
         a = 0.01
-        if wait:
-            sleep(random())
         return x + a**2 / (a**2 + (x - offset)**2)
 
 
+Saving and loading learners
+---------------------------
+
+Every learner has a `~adaptive.BaseLearner.save` and `~adaptive.BaseLearner.load`
+method that can be used to save and load **only** the data of a learner.
+
+There are **two ways** of naming the files:
+
+1. Using the ``fname`` argument in ``learner.save(fname=...)``
+2. Setting the ``fname`` attribute, like
+   ``learner.fname = 'data/example.p'`` and then ``learner.save()``
+
+The second way *must be used* when saving the ``learner``\s of a
+`~adaptive.BalancingLearner`.
+
+By default the resulting pickle files are compressed; to turn this off,
+use ``learner.save(fname=..., compress=False)``.
+
+.. execute::
+
+    # Let's create two learners and run only one.
+    learner = adaptive.Learner1D(f, bounds=(-1, 1))
+    control = adaptive.Learner1D(f, bounds=(-1, 1))
+
+    # Let's only run the learner
+    runner = adaptive.Runner(learner, goal=lambda l: l.loss() < 0.01)
+
+.. execute::
+    :hide-code:
+
+    await runner.task  # This is not needed in a notebook environment!
+
+.. execute::
+
+    runner.live_info()
+
+.. execute::
+
+    fname = 'data/example_file.p'
+    learner.save(fname)
+    control.load(fname)
+
+    (learner.plot().relabel('saved learner')
+     + control.plot().relabel('loaded learner'))
+
+Or just (without saving):
+
+.. execute::
+
+    control = adaptive.Learner1D(f, bounds=(-1, 1))
+    control.copy_from(learner)
+
+One can also periodically save the learner while it is running in a
+`~adaptive.Runner`. Use it like:
+
+.. execute::
+
+    def slow_f(x):
+        from time import sleep
+        sleep(5)
+        return x
+
+    learner = adaptive.Learner1D(slow_f, bounds=[0, 1])
+    runner = adaptive.Runner(learner, goal=lambda l: l.npoints > 100)
+    runner.start_periodic_saving(save_kwargs=dict(fname='data/periodic_example.p'), interval=6)
+
+.. execute::
+    :hide-code:
+
+    await asyncio.sleep(6)  # This is not needed in a notebook environment!
+    runner.cancel()
+
+.. execute::
+
+    runner.live_info()  # we cancelled it after 6 seconds
+
+.. execute::
+
+    # See the data that was saved after 6 seconds with
+    !ls -lah data  # only works on macOS and Linux systems
+
+
 A watched pot never boils!
 --------------------------
 
@@ -62,7 +139,7 @@ The simplest way to accomplish this is to use
 
 .. execute::
 
-    learner = adaptive.Learner1D(partial(f, wait=False), bounds=(-1, 1))
+    learner = adaptive.Learner1D(f, bounds=(-1, 1))
     adaptive.BlockingRunner(learner, goal=lambda l: l.loss() < 0.01)
     # This will only get run after the runner has finished
     learner.plot()
@@ -89,7 +166,7 @@ learner:
 
 .. execute::
 
-    learner = adaptive.Learner1D(partial(f, wait=False), bounds=(-1, 1))
+    learner = adaptive.Learner1D(f, bounds=(-1, 1))
 
     # blocks until completion
     adaptive.runner.simple(learner, goal=lambda l: l.loss() < 0.01)
@@ -107,7 +184,7 @@ non-blocking `adaptive.Runner`, you can use the
 
     from adaptive.runner import SequentialExecutor
 
-    learner = adaptive.Learner1D(partial(f, wait=False), bounds=(-1, 1))
+    learner = adaptive.Learner1D(f, bounds=(-1, 1))
 
     runner = adaptive.Runner(learner, executor=SequentialExecutor(), goal=lambda l: l.loss() < 0.01)
 
@@ -146,12 +223,11 @@ the runner. You can also stop the runner programatically using
 .. execute::
     :hide-code:
 
-    import asyncio
-    await asyncio.sleep(3)  # This is not needed in the notebook!
+    await asyncio.sleep(0.1)  # This is not needed in the notebook!
 
 .. execute::
 
-    runner.cancel()  # Let's execute this after 3 seconds
+    runner.cancel()  # Let's execute this after 0.1 seconds
 
 .. execute::
 
@@ -195,7 +271,6 @@ will raise an exception 10% of the time.
 .. execute::
     :hide-code:
 
-    import asyncio
     await asyncio.sleep(4)  # in 4 seconds it will surely have failed
 
 .. execute::
diff --git a/learner.ipynb b/learner.ipynb
index a296d3e2..3a70b55e 100644
--- a/learner.ipynb
+++ b/learner.ipynb
@@ -1000,6 +1000,106 @@
     "# Advanced Topics"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Saving and loading learners"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Every learner has a `save` and `load` method that can be used to save and load **only** the data of a learner.\n",
+    "\n",
+    "There are __two ways__ of naming the files:\n",
+    "1. Using the `fname` argument in `learner.save(fname=...)`\n",
+    "2. Setting the `fname` attribute, like `learner.fname = 'data/example.p` and then `learner.save()`\n",
+    "\n",
+    "The second way _must be used_ when saving the `learner`s of a `BalancingLearner`.\n",
+    "\n",
+    "By default the resulting pickle files are compressed, to turn this off use `learner.save(fname=..., compress=False)`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Let's create two learners and run only one.\n",
+    "learner = adaptive.Learner1D(partial(f, wait=False), bounds=(-1, 1))\n",
+    "control = adaptive.Learner1D(partial(f, wait=False), bounds=(-1, 1))\n",
+    "\n",
+    "# Let's only run the learner\n",
+    "runner = adaptive.Runner(learner, goal=lambda l: l.loss() < 0.01)\n",
+    "runner.live_info()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "fname = 'data/example_file.p'\n",
+    "learner.save(fname)\n",
+    "control.load(fname)\n",
+    "\n",
+    "learner.plot().relabel('saved learner') + control.plot().relabel('loaded learner')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Or just (without saving):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "control = adaptive.Learner1D(partial(f, wait=False), bounds=(-1, 1))\n",
+    "control.copy_from(learner)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "One can also periodically save the learner while it's run in a `Runner`. You can use it like:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def slow_f(x):\n",
+    "    from time import sleep\n",
+    "    sleep(5)\n",
+    "    return x\n",
+    "learner = adaptive.Learner1D(slow_f, bounds=[0, 1])\n",
+    "runner = adaptive.Runner(learner, goal=lambda l: l.npoints > 100)\n",
+    "runner.start_periodic_saving(save_kwargs=dict(fname='data/periodic_example.p'), interval=6)\n",
+    "runner.live_info()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# See the data after 6 seconds with\n",
+    "!ls -lah data  # only works on macOS and Linux systems"
+   ]
+  },
   {
    "cell_type": "markdown",
    "metadata": {},
-- 
GitLab