Updated version following new revision of dsgelikelihood.m

git-svn-id: https://www.dynare.org/svn/dynare/trunk@3318 ac1d8469-bf42-47a9-8791-bf33cf982152
Branch: time-shift
Author: ratto
Date:   2010-01-08 10:06:21 +00:00
Parent: e7adf28fbd
Commit: 16794e4ec9
1 changed file with 268 additions and 259 deletions


@@ -1,4 +1,4 @@
-function [fval,llik,cost_flag,ys,trend_coeff,info] = DsgeLikelihood_hh(xparam1,gend,data,data_index,number_of_observations,no_more_missing_observations)
+function [fval,llik,cost_flag,ys,trend_coeff,info] = DsgeLikelihood(xparam1,gend,data,data_index,number_of_observations,no_more_missing_observations)
 % function [fval,cost_flag,ys,trend_coeff,info] = DsgeLikelihood(xparam1,gend,data,data_index,number_of_observations,no_more_missing_observations)
 % Evaluates the posterior kernel of a dsge model.
 %
@@ -38,283 +38,292 @@ function [fval,llik,cost_flag,ys,trend_coeff,info] = DsgeLikelihood_hh(xparam1,g
 % You should have received a copy of the GNU General Public License
 % along with Dynare. If not, see <http://www.gnu.org/licenses/>.
 global bayestopt_ estim_params_ options_ trend_coeff_ M_ oo_
 fval = [];
 ys = [];
 trend_coeff = [];
-llik = NaN;
-cost_flag = 1;
-nobs = size(options_.varobs,1);
+cost_flag = 1;
+nobs = size(options_.varobs,1);
+llik=NaN;
 %------------------------------------------------------------------------------
 % 1. Get the structural parameters & define penalties
 %------------------------------------------------------------------------------
 if options_.mode_compute ~= 1 & any(xparam1 < bayestopt_.lb)
     k = find(xparam1 < bayestopt_.lb);
     fval = bayestopt_.penalty+sum((bayestopt_.lb(k)-xparam1(k)).^2);
     cost_flag = 0;
     info = 41;
     return;
 end
 if options_.mode_compute ~= 1 & any(xparam1 > bayestopt_.ub)
     k = find(xparam1 > bayestopt_.ub);
     fval = bayestopt_.penalty+sum((xparam1(k)-bayestopt_.ub(k)).^2);
     cost_flag = 0;
     info = 42;
     return;
 end
 Q = M_.Sigma_e;
 H = M_.H;
 for i=1:estim_params_.nvx
     k = estim_params_.var_exo(i,1);
     Q(k,k) = xparam1(i)*xparam1(i);
 end
 offset = estim_params_.nvx;
 if estim_params_.nvn
     for i=1:estim_params_.nvn
         k = estim_params_.var_endo(i,1);
         H(k,k) = xparam1(i+offset)*xparam1(i+offset);
     end
     offset = offset+estim_params_.nvn;
 end
 if estim_params_.ncx
     for i=1:estim_params_.ncx
         k1 = estim_params_.corrx(i,1);
         k2 = estim_params_.corrx(i,2);
         Q(k1,k2) = xparam1(i+offset)*sqrt(Q(k1,k1)*Q(k2,k2));
         Q(k2,k1) = Q(k1,k2);
     end
     [CholQ,testQ] = chol(Q);
     if testQ %% The variance-covariance matrix of the structural innovations is not definite positive.
         %% We have to compute the eigenvalues of this matrix in order to build the penalty.
         a = diag(eig(Q));
         k = find(a < 0);
         if k > 0
             fval = bayestopt_.penalty+sum(-a(k));
             cost_flag = 0;
             info = 43;
             return
         end
     end
     offset = offset+estim_params_.ncx;
 end
 if estim_params_.ncn
     for i=1:estim_params_.ncn
         k1 = options_.lgyidx2varobs(estim_params_.corrn(i,1));
         k2 = options_.lgyidx2varobs(estim_params_.corrn(i,2));
         H(k1,k2) = xparam1(i+offset)*sqrt(H(k1,k1)*H(k2,k2));
         H(k2,k1) = H(k1,k2);
     end
     [CholH,testH] = chol(H);
     if testH
         a = diag(eig(H));
         k = find(a < 0);
         if k > 0
             fval = bayestopt_.penalty+sum(-a(k));
             cost_flag = 0;
             info = 44;
             return
         end
     end
     offset = offset+estim_params_.ncn;
 end
 if estim_params_.np > 0
     M_.params(estim_params_.param_vals(:,1)) = xparam1(offset+1:end);
 end
 M_.Sigma_e = Q;
 M_.H = H;
 %------------------------------------------------------------------------------
 % 2. call model setup & reduction program
 %------------------------------------------------------------------------------
 [T,R,SteadyState,info] = dynare_resolve(bayestopt_.restrict_var_list,...
                                         bayestopt_.restrict_columns,...
                                         bayestopt_.restrict_aux);
-if info(1) == 1 | info(1) == 2 | info(1) == 5
+if info(1) == 1 || info(1) == 2 || info(1) == 5
     fval = bayestopt_.penalty+1;
     cost_flag = 0;
     return
-elseif info(1) == 3 | info(1) == 4 | info(1) == 20
-    fval = bayestopt_.penalty+info(2);%^2; % penalty power raised in DR1.m and resol already. GP July'08
+elseif info(1) == 3 || info(1) == 4 || info(1) == 6 || info(1) == 19 || info(1) == 20 || info(1) == 21
+    fval = bayestopt_.penalty+info(2);
     cost_flag = 0;
     return
 end
 bayestopt_.mf = bayestopt_.mf1;
-if ~options_.noconstant
-    if options_.loglinear == 1
-        constant = log(SteadyState(bayestopt_.mfys));
-    else
-        constant = SteadyState(bayestopt_.mfys);
-    end
-else
-    constant = zeros(nobs,1);
-end
+if options_.noconstant
+    constant = zeros(nobs,1);
+else
+    if options_.loglinear
+        constant = log(SteadyState(bayestopt_.mfys));
+    else
+        constant = SteadyState(bayestopt_.mfys);
+    end
+end
-if bayestopt_.with_trend == 1
+if bayestopt_.with_trend
     trend_coeff = zeros(nobs,1);
     t = options_.trend_coeffs;
     for i=1:length(t)
         if ~isempty(t{i})
             trend_coeff(i) = evalin('base',t{i});
         end
     end
     trend = repmat(constant,1,gend)+trend_coeff*[1:gend];
 else
     trend = repmat(constant,1,gend);
 end
 start = options_.presample+1;
 np = size(T,1);
 mf = bayestopt_.mf;
 no_missing_data_flag = (number_of_observations==gend*nobs);
 %------------------------------------------------------------------------------
 % 3. Initial condition of the Kalman filter
 %------------------------------------------------------------------------------
 kalman_algo = options_.kalman_algo;
 if options_.lik_init == 1 % Kalman filter
     if kalman_algo ~= 2
         kalman_algo = 1;
     end
     Pstar = lyapunov_symm(T,R*Q*R',options_.qz_criterium,options_.lyapunov_complex_threshold);
     Pinf = [];
 elseif options_.lik_init == 2 % Old Diffuse Kalman filter
     if kalman_algo ~= 2
         kalman_algo = 1;
     end
     Pstar = options_.Harvey_scale_factor*eye(np);
     Pinf = [];
 elseif options_.lik_init == 3 % Diffuse Kalman filter
     if kalman_algo ~= 4
         kalman_algo = 3;
     end
     [QT,ST] = schur(T);
     e1 = abs(ordeig(ST)) > 2-options_.qz_criterium;
     [QT,ST] = ordschur(QT,ST,e1);
     k = find(abs(ordeig(ST)) > 2-options_.qz_criterium);
     nk = length(k);
     nk1 = nk+1;
     Pinf = zeros(np,np);
     Pinf(1:nk,1:nk) = eye(nk);
     Pstar = zeros(np,np);
     B = QT'*R*Q*R'*QT;
     for i=np:-1:nk+2
         if ST(i,i-1) == 0
             if i == np
                 c = zeros(np-nk,1);
             else
                 c = ST(nk1:i,:)*(Pstar(:,i+1:end)*ST(i,i+1:end)')+...
                     ST(i,i)*ST(nk1:i,i+1:end)*Pstar(i+1:end,i);
             end
             q = eye(i-nk)-ST(nk1:i,nk1:i)*ST(i,i);
             Pstar(nk1:i,i) = q\(B(nk1:i,i)+c);
             Pstar(i,nk1:i-1) = Pstar(nk1:i-1,i)';
         else
             if i == np
                 c = zeros(np-nk,1);
                 c1 = zeros(np-nk,1);
             else
                 c = ST(nk1:i,:)*(Pstar(:,i+1:end)*ST(i,i+1:end)')+...
                     ST(i,i)*ST(nk1:i,i+1:end)*Pstar(i+1:end,i)+...
                     ST(i,i-1)*ST(nk1:i,i+1:end)*Pstar(i+1:end,i-1);
                 c1 = ST(nk1:i,:)*(Pstar(:,i+1:end)*ST(i-1,i+1:end)')+...
                     ST(i-1,i-1)*ST(nk1:i,i+1:end)*Pstar(i+1:end,i-1)+...
                     ST(i-1,i)*ST(nk1:i,i+1:end)*Pstar(i+1:end,i);
             end
             q = [eye(i-nk)-ST(nk1:i,nk1:i)*ST(i,i) -ST(nk1:i,nk1:i)*ST(i,i-1);...
                  -ST(nk1:i,nk1:i)*ST(i-1,i) eye(i-nk)-ST(nk1:i,nk1:i)*ST(i-1,i-1)];
             z = q\[B(nk1:i,i)+c;B(nk1:i,i-1)+c1];
             Pstar(nk1:i,i) = z(1:(i-nk));
             Pstar(nk1:i,i-1) = z(i-nk+1:end);
             Pstar(i,nk1:i-1) = Pstar(nk1:i-1,i)';
             Pstar(i-1,nk1:i-2) = Pstar(nk1:i-2,i-1)';
             i = i - 1;
         end
     end
     if i == nk+2
         c = ST(nk+1,:)*(Pstar(:,nk+2:end)*ST(nk1,nk+2:end)')+ST(nk1,nk1)*ST(nk1,nk+2:end)*Pstar(nk+2:end,nk1);
         Pstar(nk1,nk1)=(B(nk1,nk1)+c)/(1-ST(nk1,nk1)*ST(nk1,nk1));
     end
     Z = QT(mf,:);
     R1 = QT'*R;
     [QQ,RR,EE] = qr(Z*ST(:,1:nk),0);
     k = find(abs(diag([RR; zeros(nk-size(Z,1),size(RR,2))])) < 1e-8);
     if length(k) > 0
         k1 = EE(:,k);
         dd = ones(nk,1);
         dd(k1) = zeros(length(k1),1);
         Pinf(1:nk,1:nk) = diag(dd);
     end
 end
 if kalman_algo == 2
-    no_correlation_flag = 1;
-    if length(H)==1
-        H = zeros(nobs,1);
-    else
-        if all(all(abs(H-diag(diag(H)))<1e-14))% ie, the covariance matrix is diagonal...
-            H = diag(H);
-        else
-            no_correlation_flag = 1;
-        end
-    end
 end
 kalman_tol = options_.kalman_tol;
 riccati_tol = options_.riccati_tol;
 mf = bayestopt_.mf1;
 Y = data-trend;
 %------------------------------------------------------------------------------
 % 4. Likelihood evaluation
 %------------------------------------------------------------------------------
 if (kalman_algo==1)% Multivariate Kalman Filter
     if no_missing_data_flag
         [LIK, lik] = kalman_filter(T,R,Q,H,Pstar,Y,start,mf,kalman_tol,riccati_tol);
     else
         [LIK, lik] = ...
             missing_observations_kalman_filter(T,R,Q,H,Pstar,Y,start,mf,kalman_tol,riccati_tol, ...
                                                data_index,number_of_observations,no_more_missing_observations);
     end
     if isinf(LIK)
         kalman_algo = 2;
     end
 end
 if (kalman_algo==2)% Univariate Kalman Filter
+    no_correlation_flag = 1;
+    if length(H)==1 & H == 0
+        H = zeros(nobs,1);
+    else
+        if all(all(abs(H-diag(diag(H)))<1e-14))% ie, the covariance matrix is diagonal...
+            H = diag(H);
+        else
+            no_correlation_flag = 0;
+        end
+    end
     if no_correlation_flag
         [LIK, lik] = univariate_kalman_filter(T,R,Q,H,Pstar,Y,start,mf,kalman_tol,riccati_tol,data_index,number_of_observations,no_more_missing_observations);
     else
         [LIK, lik] = univariate_kalman_filter_corr(T,R,Q,H,Pstar,Y,start,mf,kalman_tol,riccati_tol,data_index,number_of_observations,no_more_missing_observations);
     end
 end
 if (kalman_algo==3)% Multivariate Diffuse Kalman Filter
     if no_missing_data_flag
-        data1 = data - trend;
-        if any(any(H ~= 0))
-            [LIK, lik] = DiffuseLikelihoodH1_Z(ST,Z,R1,Q,H,Pinf,Pstar,data1,start);
-        else
-            [LIK, lik] = DiffuseLikelihood1_Z(ST,Z,R1,Q,Pinf,Pstar,data1,start);
-        end
-        if isinf(LIK)
-            kalman_algo = 4;
-        end
-    else
-        error(['The diffuse filter is not yet implemented for models with missing observations'])
-    end
-end
+        [LIK, lik] = diffuse_kalman_filter(ST,R1,Q,H,Pinf,Pstar,Y,start,Z,kalman_tol, ...
+                                           riccati_tol);
+    else
+        [LIK, lik] = missing_observations_diffuse_kalman_filter(ST,R1,Q,H,Pinf, ...
+                                                                Pstar,Y,start,Z,kalman_tol,riccati_tol,...
+                                                                data_index,number_of_observations,...
+                                                                no_more_missing_observations);
+    end
+    if isinf(LIK)
+        kalman_algo = 4;
+    end
+end
 if (kalman_algo==4)% Univariate Diffuse Kalman Filter
-    data1 = data - trend;
-    if any(any(H ~= 0))
-        if ~estim_params_.ncn
-            [LIK, lik] = DiffuseLikelihoodH3_Z(ST,Z,R1,Q,H,Pinf,Pstar,data1,trend,start);
-        else
-            [LIK, lik] = DiffuseLikelihoodH3corr_Z(ST,Z,R1,Q,H,Pinf,Pstar,data1,trend,start);
-        end
-    else
-        [LIK, lik] = DiffuseLikelihood3_Z(ST,Z,R1,Q,Pinf,Pstar,data1,start);
-    end
-end
+    no_correlation_flag = 1;
+    if length(H)==1 & H == 0
+        H = zeros(nobs,1);
+    else
+        if all(all(abs(H-diag(diag(H)))<1e-14))% ie, the covariance matrix is diagonal...
+            H = diag(H);
+        else
+            no_correlation_flag = 0;
+        end
+    end
+    if no_correlation_flag
+        [LIK, lik] = univariate_diffuse_kalman_filter(ST,R1,Q,H,Pinf,Pstar,Y, ...
+                                                      start,Z,kalman_tol,riccati_tol,data_index,...
+                                                      number_of_observations,no_more_missing_observations);
+    else
+        [LIK, lik] = univariate_diffuse_kalman_filter_corr(ST,R1,Q,H,Pinf,Pstar, ...
+                                                           Y,start,Z,kalman_tol,riccati_tol,...
+                                                           data_index,number_of_observations,...
+                                                           no_more_missing_observations);
+    end
+end
 if imag(LIK) ~= 0
     likelihood = bayestopt_.penalty;
 else
     likelihood = LIK;
 end
 % ------------------------------------------------------------------------------
 % Adds prior if necessary
 % ------------------------------------------------------------------------------
 lnprior = priordens(xparam1,bayestopt_.pshape,bayestopt_.p6,bayestopt_.p7,bayestopt_.p3,bayestopt_.p4);
 fval = (likelihood-lnprior);
 options_.kalman_algo = kalman_algo;
 llik=[-lnprior; lik(start:end)];
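
Note on the univariate branches added above: both pick between the plain and the correlated filter by testing whether the measurement-error covariance H is numerically diagonal. A minimal MATLAB sketch of that check, using a hypothetical 2x2 H that is not part of the commit:

    % Hypothetical measurement-error covariance (illustrative values only).
    H = [0.10 0.00;
         0.00 0.25];
    % Same test as in the new code: treat H as diagonal when every
    % off-diagonal entry is below 1e-14 in absolute value.
    if all(all(abs(H-diag(diag(H))) < 1e-14))
        H = diag(H);   % keep only the variances, as done before the plain univariate call
        disp('no correlation: univariate_kalman_filter branch')
    else
        disp('correlated measurement errors: univariate_kalman_filter_corr branch')
    end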