newrat.m: add robust option that uses quadratic approximation to better adapt line search to noisy likelihood shapes

Triggered when the line search hits an error code
covariance-quadratic-approximation
Johannes Pfeifer 2023-12-14 21:57:49 +01:00
parent 2898407764
commit d25d95b3b5
4 changed files with 83 additions and 7 deletions

View File

@ -561,6 +561,7 @@ options_.csminwel=csminwel;
%newrat optimization routine
newrat.hess=1; % dynare numerical hessian
newrat.robust=false;
newrat.tolerance.f=1e-5;
newrat.tolerance.f_analytic=1e-7;
newrat.maxiter=1000;

View File

@ -288,6 +288,7 @@ switch minimizer_algorithm
newratflag = options_.newrat.hess; %default
end
nit=options_.newrat.maxiter;
robust = options_.newrat.robust;
Verbose = options_.newrat.verbosity;
Save_files = options_.newrat.Save_files;
if ~isempty(options_.optim_opt)
@ -303,6 +304,8 @@ switch minimizer_algorithm
else
newratflag=flag;
end
case 'robust'
robust = options_list{i,2};
case 'TolFun'
crit = options_list{i,2};
case 'verbosity'
@ -321,6 +324,7 @@ switch minimizer_algorithm
hess_info.gstep=options_.gstep;
hess_info.htol = 1.e-4;
hess_info.h1=options_.gradient_epsilon*ones(n_params,1);
hess_info.robust=robust;
% here we force 7th input argument (flagg) to be 0, since outer product
% gradient Hessian is handled in dynare_estimation_1
[opt_par_values,hessian_mat,gg,fval,invhess,new_rat_hess_info] = newrat(objective_function,start_par_value,bounds,analytic_grad,crit,nit,0,Verbose,Save_files,hess_info,prior_information.p2,options_.gradient_epsilon,parameter_names,varargin{:}); %hessian_mat is the plain outer product gradient Hessian

View File

@ -1,5 +1,5 @@
function [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,varargin)
% [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,varargin)
function [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,robust,varargin)
% [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon,parameter_names,robust,varargin)
%
% Gibbs type step in optimisation
%
@ -12,7 +12,7 @@ function [f0, x, ig] = mr_gstep(h1,x,bounds,func0,penalty,htol0,Verbose,Save_fil
% varargin{7} --> BoundsInfo
% varargin{8} --> oo_
% Copyright © 2006-2020 Dynare Team
% Copyright © 2006-2023 Dynare Team
%
% This file is part of Dynare.
%
@ -73,15 +73,86 @@ while i<n
gg(i)=(f1(i)'-f_1(i)')./(2.*h1(i));
hh(i) = 1/max(1.e-9,abs( (f1(i)+f_1(i)-2*f0)./(h1(i)*h1(i)) ));
if gg(i)*(hh(i)*gg(i))/2 > htol(i)
[f0, x, ~, ~] = csminit1(func0,x,penalty,f0,gg,0,diag(hh),Verbose,varargin{:});
[ff, xx,~,retcode] = csminit1(func0,x,penalty,f0,gg,0,diag(hh),Verbose,varargin{:});
if retcode && robust
if abs(x(i))<1.e-6
xa=transpose(linspace(x(i)/2, sign(x(i))*1.e-6*3/2, 7));
else
xa=transpose(linspace(x(i)/2, x(i)*3/2, 7));
end
fa=NaN(7,1);
for k=1:7
xh1(i)=xa(k);
fa(k,1) = penalty_objective_function(xh1,func0,penalty,varargin{:});
end
b=[ones(7,1) xa xa.*xa./2]\fa;
gg(i)=x(i)*b(3)+b(2);
hh(i)=1/b(3);
[ff2, xx2, ~, ~] = csminit1(func0,x,penalty,f0,gg,0,diag(hh),Verbose,varargin{:});
if ff2<ff
ff=ff2;
xx=xx2;
end
if min(fa)<ff
[ff, im]=min(fa);
xx(i)=xa(im);
end
end
ig(i)=1;
if robust
if not(isequal(xx , check_bounds(xx,bounds)))
xx = check_bounds(xx,bounds);
if xx(i)<x(i)
% lower bound
xx(i) = min(xx(i)+h1(i), 0.5*(xx(i)+x(i)));
else
% upper bound
xx(i) = max(xx(i)-h1(i), 0.5*(xx(i)+x(i)));
end
[ff,exit_flag]=penalty_objective_function(xx,func0,penalty,varargin{:});
if exit_flag~=1
disp_verbose('last step exited with bad status!',Verbose)
elseif ff<f0
f0=ff;
x=xx;
end
else
% check improvement wrt predicted one
if abs(f0-ff) < abs(gg(i)*(hh(i)*gg(i))/2/100) || abs(x(i)-xx(i))<1.e-10
[ff1, xx1] = csminit1(func0,x,penalty,f0,-gg,0,diag(hh),Verbose,varargin{:});
if not(isequal(xx1 , check_bounds(xx1,bounds)))
xx1 = check_bounds(xx1,bounds);
if xx1(i)<x(i)
% lower bound
xx1(i) = min(xx1(i)+h1(i), 0.5*(xx1(i)+x(i)));
else
% upper bound
xx1(i) = max(xx1(i)-h1(i), 0.5*(xx1(i)+x(i)));
end
[ff1,exit_flag]=penalty_objective_function(xx1,func0,penalty,varargin{:});
if exit_flag~=1
disp_verbose('last step exited with bad status!',Verbose)
end
end
if ff1<ff
ff=ff1;
xx=xx1;
end
end
f0=ff;
x=xx;
end
else
f0=ff;
x=xx;
x = check_bounds(x,bounds);
end
if Verbose
fprintf(['Done for param %s = %8.4f\n'],parameter_names{i},x(i))
fprintf(['Done for param %s = %8.4f; f = %8.4f\n'],parameter_names{i},x(i),f0)
end
end
xh1=x;
end
x = check_bounds(x,bounds);
if Save_files
save('gstep.mat','x','h1','f0')
end

View File

@ -183,7 +183,7 @@ while norm(gg)>gtol && check==0 && jit<nit
disp_verbose('last step exited with bad status!',Verbose)
end
end
[fvala, x0, ig] = mr_gstep(h1,x0,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon, parameter_names,varargin{:});
[fvala, x0, ig] = mr_gstep(h1,x0,bounds,func0,penalty,htol0,Verbose,Save_files,gradient_epsilon, parameter_names, hess_info.robust, varargin{:});
if not(isequal(x0 , check_bounds(x0,bounds)))
x0 = check_bounds(x0,bounds);
[fvala,exit_flag]=penalty_objective_function(x0,func0,penalty,varargin{:});