As we already discussed this week, this is super awesome!
I took the relevant code (the notebook has a lot of NameErrors) and sped it up a bit:
"""Adaptive 1D learning with a triangle-area-based loss.

Cleaned-up version of a notebook snippet: runs an ``adaptive.Learner1D``
on a narrow Gaussian-like peak, using a custom loss that combines the
triangle areas spanned by neighbouring points with the default loss.
"""
from bisect import bisect_left

import numpy as np

import adaptive
from adaptive.learner.learner1D import default_loss
from adaptive.learner.learnerND import volume

adaptive.notebook_extension()


def simple_runner(learner, goal):
    """Feed *learner* one point at a time until ``goal(learner)`` is True.

    After telling each new point, also recompute the losses of the
    intervals adjacent to it (per the original author's note, this is a
    hack to refresh neighbouring intervals as well).
    """
    while not goal(learner):
        x = learner.ask(1)[0][0]
        y = learner.function(x)
        learner.tell(x, y)
        # Hack to also update the losses of neighbouring intervals.
        sorted_data = sorted(learner.data)
        index = bisect_left(sorted_data, x)
        xs = [sorted_data[i]
              for i in range(index - 1, index + 3)
              if 0 <= i < len(sorted_data)]
        if len(xs) > 2:
            for i in range(len(xs) - 1):
                ival = xs[i], xs[i + 1]
                learner._update_interpolated_loss_in_interval(*ival)


def loss_of_multi_interval(xs, function_values):
    """Mean ``volume`` (triangle measure) over consecutive triples of *xs*.

    NOTE(review): assumes ``len(xs) >= 3``; with exactly two points ``N``
    is 0 and the division would raise ``ZeroDivisionError``. All callers
    in this file guard against that case.
    """
    pts = [(x, function_values[x]) for x in xs]
    N = len(pts) - 2
    return sum(volume(pts[i:i + 3]) for i in range(N)) / N


def triangle_loss(interval, scale, function_values):
    """Loss of *interval*: neighbour-triangle term plus small default-loss and dx terms.

    Falls back to the bare interval width when there are too few
    neighbouring points to form a triangle.
    """
    _default_loss = default_loss(interval, scale, function_values)
    x_left, x_right = interval
    data = sorted(function_values)
    index = bisect_left(data, x_left)
    xs = [data[i] for i in range(index - 1, index + 3) if 0 <= i < len(data)]
    dx = x_right - x_left
    if len(xs) <= 2:
        # Not enough points for a triangle; use the interval width alone.
        return dx
    # Renamed local (was 'triangle_loss') so it no longer shadows this function.
    tri_loss = loss_of_multi_interval(xs, function_values)
    return tri_loss ** 0.5 + 0.02 * _default_loss + 0.02 * dx


def f5(x):
    """Narrow peak centred at x = 0.3 (note the cubed width term, as written)."""
    return np.exp(-(x - 0.3) ** 2 / 0.1 ** 3)


if __name__ == "__main__":
    learner = adaptive.Learner1D(f5, (-1, 1), loss_per_interval=triangle_loss)
    simple_runner(learner, goal=lambda l: l.npoints > 1000)
    learner.plot()