@@ -293,7 +293,7 @@ def opt(self):
         delta_x = spla.spsolve(M, Lty)
         scale = 1
         # Damp the Gauss-Newton step if it doesn't do what the linearization predicts
-        scale_good = False
+        scale_good = la.norm(delta_x) < 10  # if the first step is already small, just take it and don't even check
         while not scale_good:
             next_y = self.create_y(self.add_delta(delta_x * scale))
             pred_y = y - L.dot(delta_x * scale)
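For context, the hunk above replaces an always-on damping loop with one that skips the check when the Gauss-Newton step is already small. Below is a minimal dense-NumPy sketch of the same scheme, assuming a generic residual function; `residual_fn`, `jac`, `step_tol`, and the 0.5 acceptance ratio are illustrative stand-ins, not this class's actual API (which solves the sparse normal equations via `spla.spsolve(M, Lty)` and predicts with the square-root factor `L`).

```python
import numpy as np

def damped_gauss_newton_step(x, residual_fn, jac, step_tol=10.0, accept_ratio=0.5):
    """One damped Gauss-Newton update for min_x ||residual_fn(x)||^2 (sketch only)."""
    y = residual_fn(x)
    J = jac(x)
    # Gauss-Newton step from the normal equations: (J^T J) delta = J^T y.
    delta_x = np.linalg.solve(J.T @ J, J.T @ y)
    scale = 1.0
    # Mirrors the new line above: accept small steps without checking.
    scale_good = np.linalg.norm(delta_x) < step_tol
    while not scale_good:
        next_y = residual_fn(x - scale * delta_x)   # actual residual after the step
        pred_y = y - J @ (scale * delta_x)          # residual the linearization predicts
        actual_red = np.linalg.norm(y) - np.linalg.norm(next_y)
        pred_red = np.linalg.norm(y) - np.linalg.norm(pred_y)
        if actual_red >= accept_ratio * pred_red:
            scale_good = True                       # reality tracks the model well enough
        else:
            scale *= 0.5                            # damp and try again
            if scale < 1e-8:                        # avoid looping forever on a bad model
                break
    return x - scale * delta_x
```

Halving `scale` until the realized reduction matches a fraction of the predicted one is the same gain-ratio idea used in Levenberg-Marquardt-style damping.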
@@ -350,26 +350,26 @@ def test_Jacobian(batch_uni, col, dx = .001):
 opt_class.opt()
 plt.plot(opt_class.states[:,0], opt_class.states[:,1], 'b', label='opt')
 plt.legend()
-plt.savefig(f'{prefix}_res.png')
+plt.savefig(f'{prefix}_res_new.png')
 
 plt.figure()
 plt.plot(opt_class.states[:,0], opt_class.states[:,1], c='b', label='estimate')
 plt.plot(truth[:,0], truth[:,1], 'r--', label='truth')
 plt.legend()
 ax = plt.gca()
 ax.set_aspect('equal')
-plt.savefig('FG_' + prefix + '.png')
+plt.savefig('FG_' + prefix + '_new.png')
 plt.show()
 
 
 plt.figure()
 plt.plot(opt_class.states - truth)
 plt.legend(['x', 'y', 'vx', 'vy'])
 plt.title('errors')
-plt.savefig(f'{prefix}_errors.png')
+plt.savefig(f'{prefix}_errors_new.png')
 plt.show()
 
-np.savez('fg_' + prefix + '_res', fg_res=opt_class.states, truth=truth)
+np.savez('fg_' + prefix + '_res_new', fg_res=opt_class.states, truth=truth)
 
 
 
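Since `np.savez` appends the `.npz` extension, the results written on the last changed line can be reloaded later for comparison against other estimators. A small usage sketch, with `'demo'` standing in for a hypothetical `prefix`:

```python
import numpy as np

data = np.load('fg_demo_res_new.npz')  # 'demo' is a placeholder prefix
fg_res, truth = data['fg_res'], data['truth']
# States are [x, y, vx, vy]; compare the position columns against truth.
rms_pos = np.sqrt(np.mean((fg_res[:, :2] - truth[:, :2]) ** 2))
print(f'RMS position error: {rms_pos:.3f}')
```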