Commit 1a0cb288 authored by Bas Nijholt

time -> iterations

parent 49c4d79e
%% Cell type:code id: tags:
```
import numpy as np
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
%matplotlib inline
# %config InlineBackend.figure_format = 'svg'
golden_mean = (np.sqrt(5) - 1) / 2 # Aesthetic ratio
fig_width_pt = 246.0 # Columnwidth
inches_per_pt = 1 / 72.27 # Convert pt to inches
fig_width = fig_width_pt * inches_per_pt
fig_height = fig_width * golden_mean # height in inches
fig_size = [fig_width, fig_height]
params = {
    "backend": "ps",
    "axes.labelsize": 13,
    "font.size": 13,
    "legend.fontsize": 10,
    "xtick.labelsize": 10,
    "ytick.labelsize": 10,
    "text.usetex": True,
    "figure.figsize": fig_size,
    "font.family": "serif",
    "font.serif": "Computer Modern Roman",
    "legend.frameon": True,
    "savefig.dpi": 300,
}
plt.rcParams.update(params)
# 'text.latex.preamble' must be a single string (the list form is deprecated)
plt.rc("text.latex", preamble=r"\usepackage{xfrac} \usepackage{siunitx}")
import functools
import itertools
import time

import adaptive
import holoviews.plotting.mpl  # loads holoviews' matplotlib backend for adaptive's plots
import matplotlib.tri as mtri
from scipy import interpolate
```
%% Cell type:markdown id: tags:
# Fig 1.
%% Cell type:code id: tags:
```
np.random.seed(1)
xs = np.array([0.1, 0.3, 0.35, 0.45])
f = lambda x: x ** 3
ys = f(xs)
means = lambda x: np.convolve(x, np.ones(2) / 2, mode="valid")
xs_means = means(xs)
ys_means = means(ys)
fig, ax = plt.subplots(figsize=fig_size)
ax.scatter(xs, ys, c="k")
ax.plot(xs, ys, c="k")
ax.annotate(
    r"$L_{1,2} = \sqrt{\Delta x^2 + \Delta y^2}$",
    xy=(np.mean([xs[0], xs[1]]), np.mean([ys[0], ys[1]])),
    xytext=(xs[0] + 0.05, ys[0] - 0.05),
    arrowprops=dict(arrowstyle="->"),
    ha="center",
    zorder=10,
)
for i, (x, y) in enumerate(zip(xs, ys)):
    sign = [1, -1][i % 2]  # alternate labels above/below the curve
    ax.annotate(
        fr"$x_{i+1}, y_{i+1}$",
        xy=(x, y),
        xytext=(x + 0.01, y + sign * 0.04),
        arrowprops=dict(arrowstyle="->"),
        ha="center",
    )
ax.scatter(xs, ys, c="green", s=5, zorder=5, label="existing data")
losses = np.hypot(xs[1:] - xs[:-1], ys[1:] - ys[:-1])
ax.scatter(
    xs_means, ys_means, c="red", s=300 * losses, zorder=8, label="candidate points"
)
xs_dense = np.linspace(xs[0], xs[-1], 400)
ax.plot(xs_dense, f(xs_dense), alpha=0.3, zorder=7, label="function")
ax.legend()
ax.axis("off")
plt.savefig("figures/loss_1D.pdf", bbox_inches="tight", transparent=True)
plt.show()
```
%% Cell type:markdown id: tags:
# Fig 2.
%% Cell type:code id: tags:
```
def f(x, offset=0.123):
    a = 0.02
    return x + a ** 2 / (a ** 2 + (x - offset) ** 2)


def g(x):
    return np.tanh(x * 40)


def h(x):
    return np.sin(100 * x) * np.exp(-x ** 2 / 0.1 ** 2)


funcs_1D = [
    dict(function=f, bounds=(-1, 1), title="peak"),
    dict(function=g, bounds=(-1, 1), title="tanh"),
    dict(function=h, bounds=(-0.3, 0.3), title="wave packet"),
]
fig, axs = plt.subplots(2, len(funcs_1D), figsize=(fig_width, 1.5 * fig_height))
n_points = 50
for i, ax in enumerate(axs.T.flatten()):
    ax.xaxis.set_ticks([])
    ax.yaxis.set_ticks([])
    kind = "homogeneous" if i % 2 == 0 else "adaptive"
    index = i // 2 if kind == "homogeneous" else (i - 1) // 2
    d = funcs_1D[index]
    bounds = d["bounds"]
    f = d["function"]
    if kind == "homogeneous":
        xs = np.linspace(*bounds, n_points)
        ys = f(xs)
        ax.set_title(rf"\textrm{{{d['title']}}}")
    elif kind == "adaptive":
        loss = adaptive.learner.learner1D.curvature_loss_function()
        learner = adaptive.Learner1D(f, bounds=bounds, loss_per_interval=loss)
        adaptive.runner.simple(learner, goal=lambda l: l.npoints >= n_points)
        xs, ys = zip(*sorted(learner.data.items()))
    xs_dense = np.linspace(*bounds, 1000)
    ax.plot(xs_dense, f(xs_dense), c="red", alpha=0.3, lw=0.5)
    ax.scatter(xs, ys, s=0.5, c="k")
axs[0][0].set_ylabel(r"$\textrm{homogeneous}$")
axs[1][0].set_ylabel(r"$\textrm{adaptive}$")
plt.savefig("figures/adaptive_vs_grid.pdf", bbox_inches="tight", transparent=True)
```
%% Cell type:markdown id: tags:
# Fig 3.
%% Cell type:code id: tags:
```
def f(xy, offset=0.123):
    a = 0.2
    x, y = xy
    return x + np.exp(-(x ** 2 + y ** 2 - 0.75 ** 2) ** 2 / a ** 4)


@functools.lru_cache()
def g_setup(fname):
    data = adaptive.utils.load(fname)
    points = np.array(list(data.keys()))
    values = np.array(list(data.values()), dtype=float)
    bounds = [
        (points[:, 0].min(), points[:, 0].max()),
        (points[:, 1].min(), points[:, 1].max()),
    ]
    ll, ur = np.reshape(bounds, (2, 2)).T
    inds = np.all(np.logical_and(ll <= points, points <= ur), axis=1)
    points, values = points[inds], values[inds].reshape(-1, 1)
    return interpolate.LinearNDInterpolator(points, values), bounds


def g(xy, fname):
    ip, _ = g_setup(fname)
    return np.round(ip(xy))


def density(x, eps=0):
    e = [0.8, 0.2]
    delta = [0.5, 0.5, 0.5]
    c = 3
    omega = [0.02, 0.05]
    H = np.array(
        [
            [e[0] + 1j * omega[0], delta[0], delta[1]],
            [delta[0], e[1] + c * x + 1j * omega[1], delta[1]],
            [delta[1], delta[2], e[1] - c * x + 1j * omega[1]],
        ]
    )
    H += np.eye(3) * eps
    return np.trace(np.linalg.inv(H)).imag


def h(xy):
    x, y = xy
    return density(x, y) + y


funcs = [
    dict(function=f, bounds=[(-1, 1), (-1, 1)], npoints=33),
    dict(
        function=g,
        bounds=g_setup("phase_diagram.pickle")[1],
        npoints=100,
        fname="phase_diagram.pickle",
    ),
    dict(function=h, bounds=[(-1, 1), (-3, 3)], npoints=50),
]
fig, axs = plt.subplots(2, len(funcs), figsize=(fig_width, 1.5 * fig_height))
plt.subplots_adjust(hspace=-0.1, wspace=0.1)
with_tri = False
for i, ax in enumerate(axs.T.flatten()):
    label = "abcdef"[i]
    ax.text(
        0.5,
        1.05,
        rf"$\mathrm{{({label})}}$",
        transform=ax.transAxes,
        horizontalalignment="center",
        verticalalignment="bottom",
    )
    ax.xaxis.set_ticks([])
    ax.yaxis.set_ticks([])
    kind = "homogeneous" if i % 2 == 0 else "adaptive"
    d = funcs[i // 2] if kind == "homogeneous" else funcs[(i - 1) // 2]
    bounds = d["bounds"]
    npoints = d["npoints"]
    f = d["function"]
    fname = d.get("fname")
    if fname is not None:
        f = functools.partial(f, fname=fname)
    if kind == "homogeneous":
        xs, ys = [np.linspace(*bound, npoints) for bound in bounds]
        data = {xy: f(xy) for xy in itertools.product(xs, ys)}
        learner = adaptive.Learner2D(f, bounds=bounds)
        learner.data = data
        d["learner_hom"] = learner
    elif kind == "adaptive":
        learner = adaptive.Learner2D(f, bounds=bounds)
        if fname is not None:
            learner.load(fname)
            # keep at most npoints**2 of the loaded points
            learner.data = {
                k: v
                for i, (k, v) in enumerate(learner.data.items())
                if i <= npoints ** 2
            }
        adaptive.runner.simple(learner, goal=lambda l: l.npoints >= npoints ** 2)
        d["learner"] = learner
    if with_tri:
        tri = learner.ip().tri
        # scipy's Delaunay calls these 'simplices' ('vertices' is the removed alias)
        triang = mtri.Triangulation(*tri.points.T, triangles=tri.simplices)
        ax.triplot(triang, c="w", lw=0.2, alpha=0.8)
    values = np.array(list(learner.data.values()))
    ax.imshow(
        learner.plot(npoints if kind == "homogeneous" else None).Image.I.data,
        extent=(-0.5, 0.5, -0.5, 0.5),
        interpolation="none",
    )
    ax.set_xticks([])
    ax.set_yticks([])
axs[0][0].set_ylabel(r"$\textrm{homogeneous}$")
axs[1][0].set_ylabel(r"$\textrm{adaptive}$")
plt.savefig("figures/adaptive_2D.pdf", bbox_inches="tight", transparent=True)
```
%% Cell type:code id: tags:
```
learner = adaptive.Learner1D(funcs_1D[0]["function"], bounds=funcs_1D[0]["bounds"])
times = []
for i in range(10000):
    t_start = time.perf_counter()  # perf_counter is the right clock for timing
    points, _ = learner.ask(1)
    t_end = time.perf_counter()
    times.append(t_end - t_start)
    learner.tell(points[0], learner.function(points[0]))
plt.plot(np.cumsum(times))  # cumulative overhead of learner.ask
```
%% Cell type:markdown id: tags:
# Algo explanation
%% Cell type:code id: tags:
```
fig, axs = plt.subplots(1, 1, figsize=(fig_width, 3 * fig_height))


def f(x, offset=0.123):
    a = 0.2
    return a ** 2 / (a ** 2 + (x - offset) ** 2)


learner = adaptive.Learner1D(
    f, (0, 2), loss_per_interval=adaptive.learner.learner1D.curvature_loss_function()
)
learner._recompute_losses_factor = 0.1
xs_dense = np.linspace(*learner.bounds, 400)
ys_dense = f(xs_dense)
step = 0.4
for i in range(11):
    offset = -i * step
    x = learner.ask(1)[0][0]
    y = f(x)
    learner.tell(x, y)
    xs, ys = map(np.array, zip(*sorted(learner.data.items())))
    ys = ys + offset
    if i >= 1:  # with fewer than two points there is no interval to plot
        axs.plot(xs_dense, ys_dense + offset, c="k", alpha=0.3, zorder=0)
        axs.plot(xs, ys, zorder=1, c="k")
        axs.scatter(xs, ys, alpha=1, zorder=2, c="k")
        # learner.losses is an ItemSortedDict, so the first item has the largest loss
        (x_left, x_right), loss = list(learner.losses.items())[0]
        y_left, y_right = learner.data[x_left] + offset, learner.data[x_right] + offset
        axs.scatter([x_left, x_right], [y_left, y_right], c="r", s=10, zorder=3)
        x_mid = np.mean((x_left, x_right))
        y_mid = np.interp(x_mid, (x_left, x_right), (y_left, y_right))
        axs.scatter(x_mid, y_mid, zorder=4, marker="x", c="green")
axs.text(
    -0.1,
    0.5,
    r"$\mathrm{iterations}$" + "\n" + r"$\longleftarrow$",
    transform=axs.transAxes,
    horizontalalignment="center",
    verticalalignment="center",
    rotation=90,
    fontsize=18,
)
# legend
import matplotlib.lines as mlines
import matplotlib.patches as mpatches


class LargestInterval:
    pass


class Interval:
    pass


class Function:
    pass


class IntervalHandler:
    def __init__(self, with_inner=True, length=20, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.with_inner = with_inner
        self.length = length

    def legend_artist(self, legend, orig_handle, fontsize, handlebox):
        x0, y0 = handlebox.xdescent, handlebox.ydescent
        offsets = [0, self.length]
        line = mlines.Line2D((0, offsets[-1]), (0, 0), zorder=0, c="k")
        handlebox.add_artist(line)
        for offset in offsets:
            circle1 = mpatches.Circle([x0 + offset, y0], 4, facecolor="k", lw=3, zorder=1)
            handlebox.add_artist(circle1)
            if self.with_inner:
                circle2 = mpatches.Circle(
                    [x0 + offset, y0], 3, facecolor="red", lw=3, zorder=1
                )
                handlebox.add_artist(circle2)


class FunctionHandler:
    def __init__(self, xs, ys, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.xs = xs / np.ptp(xs) * 20  # np.ptp(); the ndarray.ptp method is removed
        self.ys = ys - ys.mean()

    def legend_artist(self, legend, orig_handle, fontsize, handlebox):
        x0, y0 = handlebox.xdescent, handlebox.ydescent
        line = mlines.Line2D(self.xs, self.ys * 10, zorder=0, c="k", alpha=0.3)
        handlebox.add_artist(line)
plt.legend(
    [
        Function(),
        mlines.Line2D([], [], marker="o", lw=0, c="k"),
        LargestInterval(),
        Interval(),
        mlines.Line2D([], [], marker="x", lw=0, c="green"),
    ],
    [
        "original function",
        "known point",
        "interval",
        "largest loss interval",
        "next candidate point",
    ],
    handler_map={
        LargestInterval: IntervalHandler(False),
        Interval: IntervalHandler(True),
        Function: FunctionHandler(xs, ys),
    },
    bbox_to_anchor=(0.25, 0.9, 1.0, 0.0),
    ncol=1,
)
# On grid
# axs.plot(learner.bounds, [-(i + 0.5) * step, -(i + 0.5) * step], c='k', ls='--')
# xs_hom = np.linspace(*learner.bounds, i)
# ys_hom = f(xs_hom) - (i + 3) * step
# axs.plot(xs_hom, ys_hom, zorder=1, c="k")
# axs.scatter(xs_hom, ys_hom, alpha=1, zorder=2, c="k")
axs.axis("off")
plt.savefig("figures/algo.pdf", bbox_inches="tight", transparent=True)
plt.show()
```
%% Cell type:code id: tags:
```
from matplotlib.patches import Polygon
fig, axs = plt.subplots(5, 1, figsize=(fig_width, 1.5 * fig_height))
f = lambda x: np.sin(x)**2
xs = np.array([0, 1.3, 3, 5, 7, 8])
ys = f(xs)
def plot(xs, ax):
    ys = f(xs)
    xs_dense = np.linspace(xs[0], xs[-1], 300)
    ys_dense = f(xs_dense)
    ax.plot(xs_dense, ys_dense, alpha=0.3, c="k")
    ax.plot(xs, ys, c="k")
    ax.scatter(xs, ys, zorder=10, s=14, c="k")
plot(xs, axs[0])
plot(xs, axs[1])
for i, ax in enumerate(axs):
    ax.axis("off")
    ax.set_ylim(-1.5, 1.5)
    label = "abcde"[i]
    ax.text(
        0.5,
        0.9,
        rf"$\mathrm{{({label})}}$",
        transform=ax.transAxes,
        horizontalalignment="center",
        verticalalignment="bottom",
    )
def plot_tri(xs, ax):
    ys = f(xs)
    for i in range(len(xs)):
        if i == 0 or i == len(xs) - 1:
            continue  # boundary points have no triangle
        color = f"C{i}"
        verts = [(xs[i - 1], ys[i - 1]), (xs[i], ys[i]), (xs[i + 1], ys[i + 1])]
        poly = Polygon(verts, facecolor=color, alpha=0.4)
        ax.add_patch(poly)
        ax.scatter([xs[i]], [ys[i]], c=color, s=6, zorder=11)
        ax.plot(
            [xs[i - 1], xs[i + 1]], [ys[i - 1], ys[i + 1]], c="k", ls="--", alpha=0.3
        )
plot_tri(xs, axs[1])
for i in [2, 3, 4]:
    ax = axs[i]
    learner = adaptive.Learner1D(
        f,
        (xs[0], xs[-1]),
        loss_per_interval=adaptive.learner.learner1D.curvature_loss_function(),
    )
    learner.tell_many(xs, ys)
    x_new = learner.ask(1)[0][0]
    learner.tell(x_new, f(x_new))
    xs, ys = zip(*sorted(learner.data.items()))
    plot(xs, ax)
    plot_tri(xs, ax)
plt.savefig("figures/line_loss.pdf", bbox_inches="tight", transparent=True)
plt.show()
```
We start by calculating the two boundary points.
Two consecutive existing data points (black) $\{x_i, y_i\}$ define an interval.
Each interval has a loss associated with it that can be calculated from the points inside the interval $L_{i,i+1}(x_i, x_{i+1}, y_i, y_{i+1})$.
At each iteration the interval with the largest loss is indicated (red), with its corresponding candidate point (green) picked in the middle of the interval.
The loss function in this example is the curvature loss.
](figures/algo.pdf){#fig:algo}
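
To make this loop concrete, here is a minimal serial sketch of the subdivision strategy. It assumes the simple Euclidean loss $\sqrt{\Delta x^2 + \Delta y^2}$ from the first figure; the `refine` helper is an illustrative name, not adaptive's API, which uses the curvature loss here and also handles parallel point requests.

```
# A minimal sketch, not adaptive's implementation: greedy 1D refinement with a
# simple Euclidean interval loss.
import numpy as np

def refine(f, a, b, n_points):
    xs, ys = [a, b], [f(a), f(b)]  # start from the two boundary points
    while len(xs) < n_points:
        losses = np.hypot(np.diff(xs), np.diff(ys))  # one loss per interval
        i = int(np.argmax(losses))  # interval with the largest loss
        x_mid = (xs[i] + xs[i + 1]) / 2  # candidate point: the midpoint
        xs.insert(i + 1, x_mid)
        ys.insert(i + 1, f(x_mid))
    return np.array(xs), np.array(ys)

xs, ys = refine(lambda x: x ** 3, 0.0, 1.0, 20)
```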
Inspired by Visvalingam's algorithm, a method commonly employed in digital cartography for coastline simplification, we construct a loss function that does the reverse.[@visvalingam1990douglas]
Here, at each point (ignoring the boundary points), we compute the effective area associated with its triangle, see Fig. @fig:line_loss(b).
The loss of an interval then becomes the average area of its two adjacent triangles.
By Taylor expanding $f$ around $x$, one can show that the triangle area scales with the local second derivative of $f$.
We can generalize this loss to $N$ dimensions.
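
Making the Taylor-expansion step explicit: for three points spaced $h$ apart, the shoelace formula reduces the triangle area to a central second difference,
$$A = \frac{h}{2}\,\bigl|f(x+h) - 2f(x) + f(x-h)\bigr| = \frac{h^{3}}{2}\,\bigl|f''(x)\bigr| + \mathcal{O}(h^{5}),$$
so the loss is indeed dominated by the local second derivative. A short sketch of the area-based loss itself follows; the helpers `point_area` and `interval_loss` are illustrative names, not adaptive's internals. (In $N$ dimensions, the triangle would generalize to a simplex and its area to a simplex volume.)

```
# Illustrative sketch of the triangle-area ("line") loss; not adaptive's code.
import numpy as np

def point_area(p_prev, p, p_next):
    # Effective area of a point: the area of the triangle spanned with its two
    # neighbors (shoelace formula), as in Visvalingam's algorithm.
    (x0, y0), (x1, y1), (x2, y2) = p_prev, p, p_next
    return 0.5 * abs((x1 - x0) * (y2 - y0) - (x2 - x0) * (y1 - y0))

def interval_loss(points, i):
    # Loss of the interval (i, i + 1): the average effective area of its two
    # endpoints; boundary points carry no triangle and are skipped.
    areas = [
        point_area(points[j - 1], points[j], points[j + 1])
        for j in (i, i + 1)
        if 0 < j < len(points) - 1
    ]
    return float(np.mean(areas)) if areas else 0.0

pts = [(0.0, 0.0), (1.3, 0.93), (3.0, 0.02), (5.0, 0.92), (7.0, 0.43), (8.0, 0.98)]
loss = interval_loss(pts, 2)  # loss of the interval between the 3rd and 4th point
```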
![Line loss visualization.
We start with 6 points (a) on the function (grey).
Ignoring the endpoints, the effective area of each point is determined by its associated triangle (b).
The loss of each interval can be computed by taking the average area of the adjacent triangles.
Subplots (c), (d), and (e) show the subsequent iterations following (b).](figures/line_loss.pdf){#fig:line_loss}
<!-- https://bost.ocks.org/mike/simplify/ -->