From 56c110c9d2dc716f4489b2b418f165f04c68ad4a Mon Sep 17 00:00:00 2001
From: Bas Nijholt <basnijholt@gmail.com>
Date: Wed, 11 Sep 2019 11:25:05 +0200
Subject: [PATCH] finish literature review

---
 figures.ipynb | 123 ++++++++++++++++++++++++++------------------------
 paper.bib     |  53 ++++++++++++++++++++++
 paper.md      |   8 +++-
 3 files changed, 122 insertions(+), 62 deletions(-)

diff --git a/figures.ipynb b/figures.ipynb
index 9390567..799e8c6 100644
--- a/figures.ipynb
+++ b/figures.ipynb
@@ -43,68 +43,10 @@
    ]
   },
   {
-   "cell_type": "code",
-   "execution_count": null,
+   "cell_type": "markdown",
    "metadata": {},
-   "outputs": [],
    "source": [
-    "np.random.seed(1)\n",
-    "xs = np.sort(np.random.uniform(-1, 1, 3))\n",
-    "errs = np.abs(np.random.randn(3))\n",
-    "ys = xs ** 3\n",
-    "means = lambda x: np.convolve(x, np.ones(2) / 2, mode=\"valid\")\n",
-    "xs_means = means(xs)\n",
-    "ys_means = means(ys)\n",
-    "\n",
-    "fig, ax = plt.subplots()\n",
-    "plt.scatter(xs, ys, c=\"k\")\n",
-    "ax.errorbar(xs, ys, errs, capsize=5, c=\"k\")\n",
-    "ax.annotate(\n",
-    "    s=r\"$L_{1,2} = \\sqrt{\\Delta x^2 + \\Delta \\bar{y}^2}$\",\n",
-    "    xy=(np.mean([xs[0], xs[1], xs[1]]), np.mean([ys[0], ys[1], ys[1]])),\n",
-    "    xytext=(xs_means[0], ys_means[0] + 1),\n",
-    "    arrowprops=dict(arrowstyle=\"->\"),\n",
-    "    ha=\"center\",\n",
-    ")\n",
-    "\n",
-    "for i, (x, y, err) in enumerate(zip(xs, ys, errs)):\n",
-    "    err_str = fr'${{\\sigma}}_{{\\bar {{y}}_{i+1}}}$'\n",
-    "    ax.annotate(\n",
-    "        s=err_str,\n",
-    "        xy=(x, y + err / 2),\n",
-    "        xytext=(x + 0.1, y + err + 0.5),\n",
-    "        arrowprops=dict(arrowstyle=\"->\"),\n",
-    "        ha=\"center\",\n",
-    "    )\n",
-    "\n",
-    "    ax.annotate(\n",
-    "        s=fr\"$x_{i+1}, \\bar{{y}}_{i+1}$\",\n",
-    "        xy=(x, y),\n",
-    "        xytext=(x + 0.1, y - 0.5),\n",
-    "        arrowprops=dict(arrowstyle=\"->\"),\n",
-    "        ha=\"center\",\n",
-    "    )\n",
-    "\n",
-    "\n",
-    "ax.scatter(xs, ys, c=\"green\", s=5, zorder=5, label=\"more seeds\")\n",
-    "ax.scatter(xs_means, ys_means, c=\"red\", s=5, zorder=5, label=\"new point\")\n",
-    "ax.legend()\n",
-    "\n",
-    "ax.text(\n",
-    "    x=0.5,\n",
-    "    y=0.0,\n",
-    "    s=(\n",
-    "        r\"$\\textrm{if}\\; \\max{(L_{i,i+1})} > \\textrm{average\\_priority} \\cdot \\max{\\sigma_{\\bar{y}_{i}}} \\rightarrow,\\;\\textrm{add new point}$\"\n",
-    "        \"\\n\"\n",
-    "        r\"$\\textrm{if}\\; \\max{(L_{i,i+1})} < \\textrm{average\\_priority} \\cdot \\max{\\sigma_{\\bar{y}_{i}}} \\rightarrow,\\;\\textrm{add new seeds}$\"\n",
-    "    ),\n",
-    "    horizontalalignment=\"center\",\n",
-    "    verticalalignment=\"center\",\n",
-    "    transform=ax.transAxes,\n",
-    ")\n",
-    "ax.set_title(\"AverageLearner1D\")\n",
-    "ax.axis(\"off\")\n",
-    "plt.show()"
+    "# Fig 1."
    ]
   },
   {
@@ -156,6 +98,67 @@
     "plt.show()"
    ]
   },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Fig 2."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import adaptive"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def f(x, offset=0.123):\n",
+    "    a = 0.02\n",
+    "    return x + a**2 / (a**2 + (x - offset)**2)\n",
+    "\n",
+    "def g(x):\n",
+    "    return np.tanh(x*40)\n",
+    "\n",
+    "def h(x):\n",
+    "    return np.sin(100*x) * np.exp(-x**2 / 0.1**2)\n",
+    "\n",
+    "funcs = [dict(function=f, bounds=(-1, 1)), dict(function=g, bounds=(-1, 1)), dict(function=h, bounds=(-0.3, 0.3))]\n",
+    "fig, axs = plt.subplots(2, len(funcs), figsize=(fig_width, 1.5*fig_height))\n",
+    "n_points = 50\n",
+    "for i, ax in enumerate(axs.T.flatten()):\n",
+    "    if i % 2 == 0:\n",
+    "        d = funcs[i // 2]\n",
+    "        # homogeneous\n",
+    "        xs = np.linspace(*d['bounds'], n_points)\n",
+    "        ys = d['function'](xs)\n",
+    "    else:\n",
+    "        d = funcs[(i - 1) // 2]\n",
+    "        loss = adaptive.learner.learner1D.curvature_loss_function()\n",
+    "        learner = adaptive.Learner1D(**d, loss_per_interval=loss)\n",
+    "        adaptive.runner.simple(learner, goal=lambda l: l.npoints >= n_points)\n",
+    "        # adaptive\n",
+    "        xs, ys = zip(*sorted(learner.data.items()))\n",
+    "    xs_dense = np.linspace(*d['bounds'], 1000)\n",
+    "    ax.plot(xs_dense, d['function'](xs_dense), c='red', alpha=0.3, lw=0.5)\n",
+    "#     ax.plot(xs, ys, c='k', alpha=0.3, lw=0.3)\n",
+    "    ax.scatter(xs, ys, s=0.5, c='k')"
+   ]
+  },
   {
    "cell_type": "code",
    "execution_count": null,
diff --git a/paper.bib b/paper.bib
index 80617ce..3596d87 100755
--- a/paper.bib
+++ b/paper.bib
@@ -146,3 +146,56 @@
   title = {Mathematica, {V}ersion 12.0},
   note = {Champaign, IL, 2019}
 }
+
+@article{klein1999star,
+  title={Star formation with 3-D adaptive mesh refinement: the collapse and fragmentation of molecular clouds},
+  author={Klein, Richard I},
+  journal={Journal of Computational and Applied Mathematics},
+  volume={109},
+  number={1-2},
+  pages={123--152},
+  year={1999},
+  publisher={Elsevier}
+}
+
+@article{berger1989local,
+  title={Local adaptive mesh refinement for shock hydrodynamics},
+  author={Berger, Marsha J and Colella, Phillip},
+  journal={Journal of Computational Physics},
+  volume={82},
+  number={1},
+  pages={64--84},
+  year={1989},
+  publisher={Elsevier}
+}
+
+@article{berger1984adaptive,
+  title={Adaptive mesh refinement for hyperbolic partial differential equations},
+  author={Berger, Marsha J and Oliger, Joseph},
+  journal={Journal of Computational Physics},
+  volume={53},
+  number={3},
+  pages={484--512},
+  year={1984},
+  publisher={Elsevier}
+}
+
+@inproceedings{derose1998subdivision,
+  title={Subdivision surfaces in character animation},
+  author={DeRose, Tony and Kass, Michael and Truong, Tien},
+  booktitle={Proceedings of the 25th Annual Conference on Computer Graphics and Interactive Techniques},
+  pages={85--94},
+  year={1998},
+  organization={ACM}
+}
+
+@article{alliez2003anisotropic,
+  title={Anisotropic polygonal remeshing},
+  author={Alliez, Pierre and Cohen-Steiner, David and Devillers, Olivier and L{\'e}vy, Bruno and Desbrun, Mathieu},
+  journal={ACM Transactions on Graphics (TOG)},
+  volume={22},
+  number={3},
+  pages={485--493},
+  year={2003},
+  publisher={ACM}
+}
diff --git a/paper.md b/paper.md
index 3a6edc5..d066a98 100755
--- a/paper.md
+++ b/paper.md
@@ -70,7 +70,7 @@ Here, the acquired data (i.e., the observations) are used to adjust the experime
 In a typical non-adaptive experiment, decisions on how to sample are made and fixed in advance.
 
 #### Plotting and low-dimensional integration use local sampling.
-Plotting a function in between bounds requires one to evaluate the function on sufficiently many points such that when we interpolate values in between data points, we get an accurate description of the function values that were not explicitly calculated.
+Plotting a low-dimensional function between bounds requires evaluating the function at sufficiently many points, such that interpolating between the data points gives an accurate description of the function values that were not explicitly calculated.
 In order to minimize the number of points, one can use adaptive sampling routines.
 For example, for one-dimensional functions, Mathematica[@Mathematica] implements a `FunctionInterpolation` class that takes the function, $x_\textrm{min}$, and $x_\textrm{max}$, and returns an object that samples the function more densely in regions of high curvature.
 Subsequently, we can query this object for points between $x_\textrm{min}$ and $x_\textrm{max}$ and get the interpolated values, or we can use it to plot the function without specifying a grid.
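+
+As a minimal sketch of the same idea in Python, using the `adaptive` package presented in this paper (the sampled function and the point budget are illustrative):
+
+```python
+import adaptive
+
+def f(x, offset=0.123):
+    # most of the variation is near the peak at x = offset
+    a = 0.02
+    return x + a**2 / (a**2 + (x - offset)**2)
+
+learner = adaptive.Learner1D(f, bounds=(-1, 1))
+# keep evaluating f where the data gathered so far suggests it is most informative
+adaptive.runner.simple(learner, goal=lambda l: l.npoints >= 50)
+```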
@@ -80,7 +80,11 @@ In general, it requires more function evaluations than the integration routines
 It is doubly-adaptive because it calculates errors for each interval and can then decide either to split an interval into smaller intervals or to add more points to it.
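+
+A minimal sketch of this interface, assuming the `IntegratorLearner` from the `adaptive` package, which is based on this doubly-adaptive algorithm (the integrand and tolerance are illustrative):
+
+```python
+import adaptive
+
+# integrate x**2 on [0, 1] until the internal error estimate drops below tol
+learner = adaptive.IntegratorLearner(lambda x: x**2, bounds=(0, 1), tol=1e-8)
+adaptive.runner.simple(learner, goal=lambda l: l.done())
+print(learner.igral)  # the estimated value of the integral
+```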
 
 #### PDE solvers and computer graphics use adaptive meshing.
-<!-- hydrodynamics anisotropic meshing paper ref -->
+Simulations in hydrodynamics[@berger1989local] and astrophysics[@klein1999star] use a technique called adaptive mesh refinement, which can also be used to solve partial differential equations (PDEs)[@berger1984adaptive].
+It dynamically adapts the accuracy of the solution within turbulent regions of the simulation domain while the calculation is in progress.
+Computer graphics uses similar adaptive methods, in which a smooth surface is represented by a coarser piecewise-linear polygon mesh, called a subdivision surface[@derose1998subdivision].
+An example of such a polygonal remeshing method is anisotropic meshing, in which the polygons align with the curvature of the space or field[@alliez2003anisotropic].
+
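+To illustrate the core idea in one dimension, here is a toy refinement sketch (not any particular AMR implementation): repeatedly bisect the intervals where the solution changes the most.
+
+```python
+import numpy as np
+
+def refine(f, xs, tol=0.01, max_iter=10):
+    # toy 1D mesh refinement: bisect intervals with a large jump in f
+    for _ in range(max_iter):
+        jumps = np.abs(np.diff(f(xs)))
+        bad = np.where(jumps > tol)[0]  # indices of intervals to refine
+        if len(bad) == 0:
+            break
+        midpoints = (xs[bad] + xs[bad + 1]) / 2
+        xs = np.sort(np.concatenate([xs, midpoints]))
+    return xs
+
+mesh = refine(np.tanh, np.linspace(-3, 3, 10))  # densest near x = 0
+```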
 
 # Design constraints and the general algorithm
 
-- 
GitLab