diff --git a/paper.bib b/paper.bib
index 898cdb98ad026700a14e3aeae6b0b372df906964..dab14af96a6371d0d78175344ca4f5131de65629 100755
--- a/paper.bib
+++ b/paper.bib
@@ -147,6 +147,12 @@
   note = {Champaign, IL, 2019}
 }
 
+@misc{adaptive_docs,
+  author = {Nijholt, Bas and Weston, Joseph and Akhmerov, Anton},
+  title = {Adaptive documentation},
+  note = {https://adaptive.readthedocs.io}
+}
+
 @article{klein1999star,
   title={Star formation with 3-D adaptive mesh refinement: the collapse and fragmentation of molecular clouds},
   author={Klein, Richard I},
diff --git a/paper.md b/paper.md
index 9225e072b5def916c1df0880c88305d50e7acfe7..8871af9b66961a75e8fc065e63d0af706a6d1b34 100755
--- a/paper.md
+++ b/paper.md
@@ -305,8 +305,8 @@ runner = Runner(learner, goal)
 ```
 
 #### The BalancingLearner can run many learners simultaneously.
-Frequently, we need to run more than one function (learner) at once, for this we have implemented the `BalancingLearner`.
-This learner asks all child learners for points, and will choose the point of the learner that maximizes the loss improvement.
+Frequently, we need to run more than one function (learner) at once; for this we have implemented the `BalancingLearner`, which takes a list of learners instead of a single function.
+This learner asks all child learners for points and chooses the point of the learner that maximizes the loss improvement, thereby balancing the computational resources over the different learners.
 We can use it like
 ```python
 from functools import partial
@@ -320,6 +320,7 @@ bal_learner = BalancingLearner(learners)
 runner = Runner(bal_learner, goal)
 
 ```
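+The underlying selection rule can be sketched as follows. The snippet below is only an illustration of the idea, not Adaptive's internal implementation; it assumes that each child learner's `ask(n)` returns candidate points together with their expected loss improvements, and it ignores the bookkeeping of pending points that a real implementation needs.
+```python
+# Illustration of the balancing rule: ask every child learner for one
+# candidate point and pick the learner whose candidate promises the
+# largest loss improvement.
+def choose_among(learners):
+    suggestions = [lrn.ask(1) for lrn in learners]  # [(points, loss_improvements), ...]
+    i_best = max(range(len(learners)),
+                 key=lambda i: suggestions[i][1][0])
+    return i_best, suggestions[i_best][0][0]
+```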
+For more details on how to use Adaptive, we recommend reading the documentation [@adaptive_docs].
 
 # Possible extensions