diff --git a/matlab/dynare_estimation.m b/matlab/dynare_estimation.m
index 348bdf8c4..e0f8907c1 100644
--- a/matlab/dynare_estimation.m
+++ b/matlab/dynare_estimation.m
@@ -91,7 +91,7 @@
 else
     dynare_estimation_1(var_list,dname);
 end
 
-if options_.mode_compute && options_.analytic_derivation,
+if isnumeric(options_.mode_compute) && options_.mode_compute && options_.analytic_derivation,
     options_.analytic_derivation=analytic_derivation0;
 end
diff --git a/matlab/dynare_estimation_1.m b/matlab/dynare_estimation_1.m
index 5fdd91895..eba52ea28 100644
--- a/matlab/dynare_estimation_1.m
+++ b/matlab/dynare_estimation_1.m
@@ -261,14 +261,14 @@ if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation
     [xparam1, fval, exitflag, hh, options_, Scale] = dynare_minimize_objective(objective_function,xparam1,options_.mode_compute,options_,[bounds.lb bounds.ub],bayestopt_.name,bayestopt_,hh,dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_);
     fprintf('\nFinal value of minus the log posterior (or likelihood):%f \n', fval);
 
-    if options_.mode_compute==5 && options_.analytic_derivation==-1 %reset options changed by newrat
+    if isnumeric(options_.mode_compute) && options_.mode_compute==5 && options_.analytic_derivation==-1 %reset options changed by newrat
         options_.analytic_derivation = options_analytic_derivation_old; %reset
-    elseif options_.mode_compute==6 %save scaling factor
+    elseif isnumeric(options_.mode_compute) && options_.mode_compute==6 %save scaling factor
         save([M_.fname '_optimal_mh_scale_parameter.mat'],'Scale');
         options_.mh_jscale = Scale;
         bayestopt_.jscale = ones(length(xparam1),1)*Scale;
     end
-    if ~isequal(options_.mode_compute,6) %always already computes covariance matrix
+    if ~isnumeric(options_.mode_compute) || ~isequal(options_.mode_compute,6) %always already computes covariance matrix
         if options_.cova_compute == 1 %user did not request covariance not to be computed
             if options_.analytic_derivation && strcmp(func2str(objective_function),'dsge_likelihood'),
                 ana_deriv_old = options_.analytic_derivation;
@@ -276,12 +276,12 @@ if ~isequal(options_.mode_compute,0) && ~options_.mh_posterior_mode_estimation
                 [junk1, junk2, hh] = feval(objective_function,xparam1, ...
                     dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_);
                 options_.analytic_derivation = ana_deriv_old;
-            elseif ~(isequal(options_.mode_compute,5) && newratflag~=1),
+            elseif ~isnumeric(options_.mode_compute) || ~(isequal(options_.mode_compute,5) && newratflag~=1),
                 % with flag==0, we force to use the hessian from outer
                 % product gradient of optimizer 5
                 hh = reshape(hessian(objective_function,xparam1, ...
                     options_.gstep,dataset_,dataset_info,options_,M_,estim_params_,bayestopt_,bounds,oo_),nx,nx);
-            elseif isequal(options_.mode_compute,5)
+            elseif isnumeric(options_.mode_compute) && isequal(options_.mode_compute,5)
                 % other numerical hessian options available with optimizer 5
                 %
                 % if newratflag == 0
diff --git a/matlab/optimization/dynare_minimize_objective.m b/matlab/optimization/dynare_minimize_objective.m
index 77b1449dc..27456dbac 100644
--- a/matlab/optimization/dynare_minimize_objective.m
+++ b/matlab/optimization/dynare_minimize_objective.m
@@ -46,14 +46,14 @@ function [opt_par_values,fval,exitflag,hessian_mat,options_,Scale]=dynare_minimi
 %% set bounds and parameter names if not already set
 n_params=size(start_par_value,1);
 if isempty(bounds)
-    if minimizer_algorithm==10
+    if isnumeric(minimizer_algorithm) && minimizer_algorithm==10
         error('Algorithm 10 (simpsa) requires upper and lower bounds')
     else
         bounds=[-Inf(n_params,1) Inf(n_params,1)];
     end
 end
 
-if minimizer_algorithm==10 && any(any(isinf(bounds)))
+if isnumeric(minimizer_algorithm) && minimizer_algorithm==10 && any(any(isinf(bounds)))
     error('Algorithm 10 (simpsa) requires finite upper and lower bounds')
 end
 
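Note on the pattern applied throughout the patch: options_.mode_compute / minimizer_algorithm may hold either a positive integer or a user-supplied function handle (or string) naming a custom optimizer, and comparing a function handle with == raises an error in MATLAB. Every numeric comparison is therefore wrapped in a short-circuit isnumeric() guard, and negated comparisons use || so non-numeric values fall through to the "not equal" branch. The following minimal standalone sketch illustrates the guard (the anonymous optimizer and the option value 6 are illustrative only, not part of the patch):

% Minimal sketch of the guard pattern (illustrative values, not patch code).
options_ = struct();
options_.mode_compute = @(fun, x0) fminsearch(fun, x0);  % hypothetical user-supplied optimizer

% Unguarded, options_.mode_compute==6 errors when mode_compute is a function handle;
% with the guard, short-circuit && skips the numeric comparison for non-numeric values.
if isnumeric(options_.mode_compute) && options_.mode_compute==6
    disp('optimizer 6: save the tuned mh_jscale');
end

% Negated checks use ||, so a function handle takes the "not equal to 6" branch
% instead of being compared numerically.
if ~isnumeric(options_.mode_compute) || ~isequal(options_.mode_compute,6)
    disp('covariance matrix still has to be computed');
end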