From 09fa7b152ff5bacd9a9fe1cb44f0ec1db6e395b2 Mon Sep 17 00:00:00 2001
From: Bas Nijholt <basnijholt@gmail.com>
Date: Tue, 17 Sep 2019 19:05:25 +0200
Subject: [PATCH] add link to docs

---
 paper.bib | 6 ++++++
 paper.md  | 5 +++--
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/paper.bib b/paper.bib
index 898cdb9..dab14af 100755
--- a/paper.bib
+++ b/paper.bib
@@ -147,6 +147,12 @@
 note = {Champaign, IL, 2019}
 }
 
+@misc{adaptive_docs,
+  author = {Bas Nijholt and Joseph Weston and Anton Akhmerov},
+  title = {Adaptive documentation},
+  note = {https://adaptive.readthedocs.io}
+}
+
 @article{klein1999star,
 title={Star formation with 3-D adaptive mesh refinement: the collapse and fragmentation of molecular clouds},
 author={Klein, Richard I},
diff --git a/paper.md b/paper.md
index 9225e07..8871af9 100755
--- a/paper.md
+++ b/paper.md
@@ -305,8 +305,8 @@ runner = Runner(learner, goal)
 ```
 
 #### The BalancingLearner can run many learners simultaneously.
-Frequently, we need to run more than one function (learner) at once, for this we have implemented the `BalancingLearner`.
-This learner asks all child learners for points, and will choose the point of the learner that maximizes the loss improvement.
+Frequently, we need to run more than one function (learner) at once; for this we have implemented the `BalancingLearner`, which takes a list of learners instead of a single function.
+This learner asks all child learners for points and chooses the point of the learner that maximizes the loss improvement, thereby balancing the resources across the different learners.
 We can use it like
 ```python
 from functools import partial
@@ -320,6 +320,7 @@ bal_learner = BalancingLearner(learners)
 runner = Runner(bal_learner, goal)
 ```
 
+For more details on how to use Adaptive, we recommend reading the documentation [@adaptive_docs].
 
 # Possible extensions
 
-- 
GitLab
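
Note on the `BalancingLearner` usage shown in the diff: the construction of `learners` falls between the two `paper.md` hunks, so it is not visible in this patch. Below is a minimal, self-contained sketch of the pattern the added text describes; the example function `h`, the offsets, the `(-1, 1)` bounds, the 0.01 loss threshold, and the use of the blocking `adaptive.runner.simple` runner (instead of the paper's asynchronous `Runner`) are illustrative assumptions, not taken from the paper.

```python
from functools import partial

import adaptive
import numpy as np


def h(x, offset=0.0):
    """Illustrative 1D function with a peak whose location depends on `offset`."""
    return x + np.exp(-((x - offset) ** 2) / 0.01)


# The BalancingLearner takes a list of learners rather than a single function;
# here each child learner samples h for a different peak position.
learners = [
    adaptive.Learner1D(partial(h, offset=offset), bounds=(-1, 1))
    for offset in (-0.5, 0.0, 0.5)
]
bal_learner = adaptive.BalancingLearner(learners)

# BalancingLearner.loss() is the largest loss among the children, so this
# goal stops only when every child learner is refined below 0.01.
goal = lambda bl: bl.loss() < 0.01

# Blocking runner for a plain script; the paper's snippet uses the
# asynchronous Runner, which distributes the work over an executor.
adaptive.runner.simple(bal_learner, goal=goal)
```

Because the balancing is driven by the expected loss improvement of each child, the children that still have coarse regions receive most of the new points, which is the resource balancing the added sentence in the diff refers to.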