dynare_minimize_objective: fix use of analytic derivatives by employing wrapper function

Branch: time-shift
Author: Johannes Pfeifer
Date: 2021-01-22 20:09:32 +01:00
Parent: ff4278070a
Commit: 25803a6842
3 changed files with 67 additions and 31 deletions

File: method_of_moments.m

@@ -182,9 +182,9 @@ if strcmp(options_mom_.mom.mom_method,'GMM')
     if options_mom_.order > 3
         error('method_of_moments: perturbation orders higher than 3 are not implemented for GMM estimation, try using SMM.\n');
     end
-    options_mom_.mom = set_default_option(options_mom_.mom,'analytic_standard_errors',false); % compute standard errors numerically (0) or analytically (1). Analytical derivatives are only available for GMM.
-    options_mom_.mom = set_default_option(options_mom_.mom,'analytic_jacobian',false); % use analytic Jacobian in optimization, only available for GMM and gradient-based optimizers
 end
+options_mom_.mom = set_default_option(options_mom_.mom,'analytic_standard_errors',false); % compute standard errors numerically (0) or analytically (1). Analytical derivatives are only available for GMM.
+options_mom_.mom = set_default_option(options_mom_.mom,'analytic_jacobian',false); % use analytic Jacobian in optimization, only available for GMM and gradient-based optimizers
 % initialize flag to compute derivs in objective function (needed for GMM with either analytic_standard_errors or analytic_jacobian )
 options_mom_.mom.compute_derivs = false;

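For readers unfamiliar with the helper used above: set_default_option only fills in a field when the user has not already set it, so these lines define defaults without overriding mod-file settings. A minimal sketch of that semantics, not the actual Dynare implementation, which may do additional argument checking:

    function options = set_default_option(options, fieldname, default)
    % Sketch: keep a user-supplied value, otherwise fill in the default.
        if ~isfield(options, fieldname)
            options.(fieldname) = default;
        end
    end
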
File: analytic_gradient_wrapper.m

@@ -31,4 +31,7 @@ function [fval, grad, hess, exit_flag]=analytic_gradient_wrapper(x, fcn, varargin)
 % You should have received a copy of the GNU General Public License
 % along with Dynare. If not, see <http://www.gnu.org/licenses/>.
 [fval, info, exit_flag, grad, hess] = fcn(x, varargin{:});
+if size(grad,2)==1
+    grad=grad'; %should be row vector for Matlab; exception lsqnonlin where Jacobian is required
+end

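The wrapper exists because Dynare objective functions return their outputs in the order [fval, info, exit_flag, grad, hess], while MATLAB's gradient-based optimizers expect an objective of the form [fval, grad] = f(x) and cannot forward extra arguments themselves; the wrapper reorders the outputs to [fval, grad, hess, exit_flag] and, as added here, transposes a column gradient into a row vector while leaving a multi-column Jacobian (the lsqnonlin case) untouched. A hedged usage sketch, where my_objective, x0 and the extra arguments are placeholders rather than names from the repository:

    % my_objective is assumed to follow the Dynare convention
    % [fval, info, exit_flag, grad, hess] = my_objective(x, extra1, extra2)
    func = @(x) analytic_gradient_wrapper(x, @my_objective, extra1, extra2);
    opts = optimoptions('fminunc', 'GradObj', 'on');   % tell fminunc the gradient is supplied
    [xhat, fval, exitflag] = fminunc(func, x0, opts);
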
File: dynare_minimize_objective.m

@@ -77,24 +77,37 @@ switch minimizer_algorithm
         % is not able to even move away from the initial point.
         optim_options = optimoptions(optim_options, 'Algorithm','active-set');
     end
+    if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1)
+        optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX
+    end
     if ~isempty(options_.optim_opt)
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
     if options_.silent_optimizer
         optim_options = optimoptions(optim_options,'display','off');
     end
-    if options_.analytic_derivation
-        optim_options = optimoptions(optim_options,'GradObj','on','TolX',1e-7); %alter default TolX
-    end
-    if ~isoctave
-        [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ...
-            fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:});
+    if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1) %use wrapper
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        if ~isoctave
+            [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        else
+            % Under Octave, use a wrapper, since fmincon() does not have an 11th
+            % arg. Also, only the first 4 output arguments are available.
+            [opt_par_values,fval,exitflag,output] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        end
     else
-        % Under Octave, use a wrapper, since fmincon() does not have an 11th
-        % arg. Also, only the first 4 output arguments are available.
-        func = @(x) objective_function(x,varargin{:});
-        [opt_par_values,fval,exitflag,output] = ...
-            fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        if ~isoctave
+            [opt_par_values,fval,exitflag,output,lamdba,grad,hessian_mat] = ...
+                fmincon(objective_function,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since fmincon() does not have an 11th
+            % arg. Also, only the first 4 output arguments are available.
+            func = @(x) objective_function(x,varargin{:});
+            [opt_par_values,fval,exitflag,output] = ...
+                fmincon(func,start_par_value,[],[],[],[],bounds(:,1),bounds(:,2),[],optim_options);
+        end
     end
   case 2
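
fmincon only consults a user-supplied gradient when told to: with 'GradObj' set to 'on' it calls the objective with two outputs and skips finite differencing, which is exactly the contract analytic_gradient_wrapper adapts Dynare objectives to. A self-contained toy illustration, with quad_objective and all data invented for this sketch:

    % contents of quad_objective.m (hypothetical example function)
    function [fval, grad] = quad_objective(x, A, b)
        fval = 0.5*x'*A*x - b'*x;
        if nargout > 1
            grad = A*x - b;      % analytic gradient, computed only when requested
        end
    end

    % usage: bind the extra arguments with an anonymous function, as the diff does
    A = [3 0; 0 1]; b = [1; 2]; x0 = zeros(2,1);
    opts = optimoptions('fmincon', 'GradObj', 'on');
    xhat = fmincon(@(x) quad_objective(x, A, b), x0, [], [], [], [], [-5; -5], [5; 5], [], opts);
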
@@ -159,20 +172,28 @@ switch minimizer_algorithm
     if ~isempty(options_.optim_opt)
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
-    if options_.analytic_derivation
-        optim_options = optimoptions(optim_options,'GradObj','on');
-    end
     if options_.silent_optimizer
         optim_options = optimoptions(optim_options,'display','off');
     end
-    if ~isoctave
-        [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:});
+    if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1)
+        optim_options = optimoptions(optim_options,'GradObj','on');
+        if ~isoctave
+            func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        else
+            % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
+            func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        end
     else
-        % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
-        func = @(x) objective_function(x,varargin{:});
-        [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        if ~isoctave
+            [opt_par_values,fval,exitflag] = fminunc(objective_function,start_par_value,optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since fminunc() does not have a 4th arg
+            func = @(x) objective_function(x,varargin{:});
+            [opt_par_values,fval,exitflag] = fminunc(func,start_par_value,optim_options);
+        end
     end
   case 4
     % Set default options.
     H0 = 1e-4*eye(n_params);
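
A practical aside when supplying analytic gradients this way: recent MATLAB releases can cross-check the supplied gradient against finite differences before optimizing. The snippet below is not part of the commit; it is an optional debugging aid that mirrors the fminunc branch above, reuses the same workspace names (objective_function, start_par_value, varargin), and assumes a MATLAB version that supports the CheckGradients option:

    func = @(x) analytic_gradient_wrapper(x, objective_function, varargin{:});
    opts = optimoptions('fminunc', 'GradObj', 'on', 'CheckGradients', true);
    [opt_par_values, fval, exitflag] = fminunc(func, start_par_value, opts);
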
@@ -505,7 +526,12 @@ switch minimizer_algorithm
     if options_.silent_optimizer
         solveoptoptions.verbosity = 0;
     end
-    [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:});
+    if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1)
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        [opt_par_values,fval]=solvopt(start_par_value,func,1,[],[],solveoptoptions);
+    else
+        [opt_par_values,fval]=solvopt(start_par_value,objective_function,[],[],[],solveoptoptions,varargin{:});
+    end
   case 102
     if isoctave
         error('Optimization algorithm 2 is not available under Octave')
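
Note that every optimizer case guards its new branch with the same condition. The isfield test relies on MATLAB's short-circuit &&, so options_.mom.analytic_jacobian is only dereferenced when a mom field actually exists; outside method-of-moments runs the expression simply evaluates to false instead of erroring. In isolation the pattern is just:

    use_analytic_grad = options_.analytic_derivation || ...
        (isfield(options_, 'mom') && options_.mom.analytic_jacobian == 1);
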
@@ -534,15 +560,22 @@ switch minimizer_algorithm
         eval(['optim_options = optimoptions(optim_options,' options_.optim_opt ');']);
     end
     if options_.silent_optimizer
-        optim_options = optimoptions(optim_options,'Display','off');
+        optim_options.Display='off';
     end
-    if ~isoctave
-        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(objective_function,start_par_value,bounds(:,1),bounds(:,2),optim_options,varargin{:});
+    if options_.analytic_derivation || (isfield(options_,'mom') && options_.mom.analytic_jacobian==1)
+        optim_options.SpecifyObjectiveGradient=true;
+        func = @(x) analytic_gradient_wrapper(x,objective_function,varargin{:});
+        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = ...
+            lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
     else
-        % Under Octave, use a wrapper, since lsqnonlin() does not have a 6th arg
-        func = @(x)objective_function(x,varargin{:});
-        [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
-    end
+        if ~isoctave
+            [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(objective_function,start_par_value,bounds(:,1),bounds(:,2),optim_options,varargin{:});
+        else
+            % Under Octave, use a wrapper, since lsqnonlin() does not have a 6th arg
+            func = @(x)objective_function(x,varargin{:});
+            [opt_par_values,Resnorm,fval,exitflag,OUTPUT,LAMBDA,JACOB] = lsqnonlin(func,start_par_value,bounds(:,1),bounds(:,2),optim_options);
+        end
+    end
   otherwise
     if ischar(minimizer_algorithm)
         if exist(minimizer_algorithm)
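
lsqnonlin is the one optimizer here with a different contract: its objective returns a vector of residuals, and when SpecifyObjectiveGradient is true the second output must be the Jacobian of those residuals (m-by-n), not a gradient row vector. This is why the wrapper's transpose is skipped whenever grad has more than one column, as noted in its "exception lsqnonlin" comment. A self-contained toy sketch; the residual function and data are hypothetical:

    % contents of linear_residuals.m (hypothetical example function)
    function [r, J] = linear_residuals(theta, y, X)
        r = y - X*theta;         % m-by-1 residual vector
        if nargout > 1
            J = -X;              % m-by-n Jacobian of the residuals w.r.t. theta
        end
    end

    % usage
    X = [ones(5,1) (1:5)']; theta_true = [0.5; 2]; y = X*theta_true;
    opts = optimoptions('lsqnonlin', 'SpecifyObjectiveGradient', true);
    theta_hat = lsqnonlin(@(theta) linear_residuals(theta, y, X), [0; 0], [-10; -10], [10; 10], opts);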