Hi everyone, I used the following source code:
function [parameters, loglikelihood, Ht, likelihoods, stdresid, stderrors, A, B, scores] = full_bekk_mvgarch(data,p,q, BEKKoptions);
% PURPOSE:
% To estimate a full BEKK multivariate GARCH model.
%
% USAGE:
% [parameters, loglikelihood, Ht, likelihoods, stdresid, stderrors, A, B, scores] = full_bekk_mvgarch(data,p,q,options);
%
%
% INPUTS:
% data - A t by k matrix of zero mean residuals
% p - The lag length of the innovation process
% q - The lag length of the AR process
% options - (optional) Options for the optimization(fminunc)
%
% OUTPUTS:
% parameters - A (k*(k+1))/2+p*k^2+q*k^2 vector of estimated parameters.
% For any k^2 set of innovation or AR parameters X,
% reshape(X,k,k) will give the correct matrix.
% To recover C, use ivech(parameters(1:(k*(k+1))/2))
% loglikelihood - The loglikelihood of the function at the optimum
% Ht - A k x k x t 3 dimension matrix of conditional covariances
% likelihoods - A t by 1 vector of individual likelihoods
% stdresid - A t by k matrix of multivariate standardized residuals
% stderrors - A numParams^2 square matrix of robust standard errors (A^(-1)*B*A^(-1)*t^(-1))
% A - The estimated inverse of the non-robust standard errors
% B - The estimated covariance of the scores
% scores - A t by numParams matrix of individual scores
% need to try and get some smart starting values
if size(data,2) > size(data,1)
data=data';
end
[t k]=size(data);
k2=k*(k+1)/2;
scalaropt=optimset('fminunc');
scalaropt=optimset(scalaropt,'TolFun',1e-1,'Display','iter','Diagnostics','on','DiffMaxChange',1e-2);
startingparameters=scalar_bekk_mvgarch(data,p,q,scalaropt);
CChol=startingparameters(1:(k*(k+1))/2);
C=ivech(startingparameters(1:(k*(k+1))/2))*ivech(startingparameters(1:(k*(k+1))/2))';
newA=[];
newB=[];
for i=1:p
newA=[newA diag(ones(k,1))*startingparameters(((k*(k+1))/2)+i)];
end
for i=1:q
newB=[newB diag(ones(k,1))*startingparameters(((k*(k+1))/2)+i+p)];
end
newA=reshape(newA,k*k*p,1);
newB=reshape(newB,k*k*q,1);
startingparameters=[CChol;newA;newB];
if nargin<=3 || isempty(BEKKoptions)
options=optimset('fminunc');
options.Display='iter';
options.Diagnostics='on';
options.TolX=1e-4;
options.TolFun=1e-4;
options.MaxFunEvals=5000*length(startingparameters);
options.MaxIter=5000*length(startingparameters);
else
options=BEKKoptions;
end
parameters=fminunc('full_bekk_mvgarch_likelihood',startingparameters,options,data,p,q,k,k2,t);
[loglikelihood,likelihoods,Ht]=full_bekk_mvgarch_likelihood(parameters,data,p,q,k,k2,t);
loglikelihood=-loglikelihood;
likelihoods=-likelihoods;
% Standardized residuals
stdresid=zeros(size(data));
for i=1:t
stdresid(i,:)=data(i,:)*Ht(:,:,i)^(-0.5);
end
%Std Errors
if nargout>=6
A=hessian_2sided('full_bekk_mvgarch_likelihood',parameters,data,p,q,k,k2,t);
h=max(abs(parameters/2),1e-2)*eps^(1/3);
hplus=parameters+h;
hminus=parameters-h;
likelihoodsplus=zeros(t,length(parameters));
likelihoodsminus=zeros(t,length(parameters));
for i=1:length(parameters)
hparameters=parameters;
hparameters(i)=hplus(i);
[HOLDER, indivlike] = full_bekk_mvgarch_likelihood(hparameters,data,p,q,k,k2,t);
likelihoodsplus(:,i)=indivlike;
end
for i=1:length(parameters)
hparameters=parameters;
hparameters(i)=hminus(i);
[HOLDER, indivlike] = full_bekk_mvgarch_likelihood(hparameters,data,p,q,k,k2,t);
likelihoodsminus(:,i)=indivlike;
end
scores=(likelihoodsplus-likelihoodsminus)./(2*repmat(h',t,1));
B=cov(scores);
A=A/t;
stderrors=A^(-1)*B*A^(-1)*t^(-1);
end
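For reference, here is how I read the header comments about recovering C, A, and B from the returned parameter vector. This is only a sketch of my understanding (not verified): ivech is the inverse half-vec function the code above already uses, and I am assuming k = 3 series with p = q = 1, as in my run.

% parameters is the first output of full_bekk_mvgarch(data,1,1)
k  = 3;                                   % number of series (the Ht slices are 3 x 3)
k2 = k*(k+1)/2;                           % 6 entries for the factor of C
CChol = ivech(parameters(1:k2));          % lower-triangular factor, per the header comment
C     = CChol*CChol';                     % intercept matrix, as computed inside the function
A     = reshape(parameters(k2+1:k2+k*k),       k, k);   % innovation (ARCH) coefficient matrix
B     = reshape(parameters(k2+k*k+1:k2+2*k*k), k, k);   % AR (GARCH) coefficient matrix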
This is the output I got:
parameters =
-0.5804
-1.3458
-0.1689
2.2979
0.0869
0.1618
1.1404
0.2854
0.0222
0.0378
1.0745
0.0072
0.0760
0.1526
1.0116
0.0000
0.0000
0.0000
-0.0000
0.0000
0.0000
-0.0000
-0.0000
0.0001
Ht
val(:,:,1) =
0.3437 0.7849 0.1013
0.7849 7.0938 0.4288
0.1013 0.4288 0.0638
val(:,:,2) =
34.0053 41.1017 27.9163
41.1017 55.3789 33.7416
27.9163 33.7416 23.0469
val(:,:,3) =
34.0624 41.6003 28.0812
41.6003 56.4968 34.2960
28.0812 34.2960 23.2808
val(:,:,4) =
33.8926 41.4722 27.8987
41.4722 56.4355 34.1393
27.8987 34.1393 23.0949
val(:,:,5) =
33.8041 41.2873 27.4702
41.2873 56.1174 33.5562
27.4702 33.5562 22.4494
val(:,:,6) =
33.9644 41.3480 27.2928
41.3480 56.0301 33.2337
27.2928 33.2337 22.0547
val(:,:,7) =
34.2687 41.5123 27.1801
41.5123 55.9848 32.9358
27.1801 32.9358 21.6772
val(:,:,8) =
34.6165 41.8540 28.2687
41.8540 56.3040 34.1802
28.2687 34.1802 23.2126
val(:,:,9) =
34.7279 42.0222 28.2464
42.0222 56.5474 34.1821
28.2464 34.1821 23.1011
val(:,:,10) =
34.8838 42.1895 28.6618
42.1895 56.7245 34.6640
28.6618 34.6640 23.6791
val(:,:,11) =
34.5067 41.9003 28.2063
41.9003 56.5736 34.2518
28.2063 34.2518 23.1842
val(:,:,12) =
34.2872 41.7850 28.0333
41.7850 56.6148 34.1662
28.0333 34.1662 23.0482
val(:,:,13) =
34.2872 41.7767 28.1257
41.7767 56.5946 34.2708
28.1257 34.2708 23.2004
val(:,:,14) =
34.2769 41.7343 28.0296
41.7343 56.5073 34.1302
28.0296 34.1302 23.0490
val(:,:,15) =
34.5165 41.8314 28.0754
41.8314 56.3937 34.0282
28.0754 34.0282 22.9628
val(:,:,16) =
34.6405 41.8935 28.1238
41.8935 56.3643 34.0155
28.1238 34.0155 22.9591
val(:,:,17) =
34.4716 41.7853 27.9648
41.7853 56.3478 33.9018
27.9648 33.9018 22.8120
val(:,:,18) =
34.5402 41.8332 28.1137
41.8332 56.3639 34.0524
28.1137 34.0524 23.0097
val(:,:,19) =
34.5515 41.8867 28.3077
41.8867 56.4763 34.3183
28.3077 34.3183 23.3209
val(:,:,20) =
34.4037 41.7846 28.0050
41.7846 56.4444 34.0164
28.0050 34.0164 22.9232
val(:,:,21) =
34.8090 42.0257 28.2741
42.0257 56.4394 34.1387
28.2741 34.1387 23.0922
val(:,:,22) =
34.3959 41.7580 27.9933
41.7580 56.3917 33.9882
27.9933 33.9882 22.9093
val(:,:,23) =
34.1381 41.6266 28.0080
41.6266 56.4497 34.1535
28.0080 34.1535 23.1077
val(:,:,24) =
33.5451 41.2177 27.8280
41.2177 56.3302 34.1929
27.8280 34.1929 23.2176
val(:,:,25) =
33.2610 40.9830 27.4282
40.9830 56.1802 33.7985
27.4282 33.7985 22.7489
val(:,:,26) =
33.5606 41.4775 27.6517
41.4775 56.9416 34.1781
27.6517 34.1781 22.9136
val(:,:,27) =
33.8297 41.4955 27.5416
41.4955 56.5848 33.7879
27.5416 33.7879 22.5492
val(:,:,28) =
35.0818 42.2619 28.2274
42.2619 56.6142 34.0097
28.2274 34.0097 22.8357
val(:,:,29) =
35.5107 42.5064 28.3764
42.5064 56.5887 33.9725
28.3764 33.9725 22.7969
val(:,:,30) =
35.4499 42.4587 28.2142
42.4587 56.5613 33.7996
28.2142 33.7996 22.5757
val(:,:,31) =
35.0797 42.2332 28.2176
42.2332 56.5489 33.9769
28.2176 33.9769 22.8213
val(:,:,32) =
33.8428 41.3958 27.6241
41.3958 56.3234 33.7931
27.6241 33.7931 22.6757
val(:,:,33) =
34.2471 41.6559 27.8913
41.6559 56.3616 33.9285
27.8913 33.9285 22.8419
val(:,:,34) =
29.9220 38.3825 26.1603
38.3825 54.8814 33.5510
26.1603 33.5510 23.0212
val(:,:,35) =
34.2352 41.5762 27.9202
41.5762 56.1868 33.9098
27.9202 33.9098 22.8974
val(:,:,36) =
33.4603 41.0648 27.5139
41.0648 56.0835 33.7693
27.5139 33.7693 22.7540
Maximum log-likelihood value:
-517.556473556469
Could any of you experts tell me why there are 24 parameters, and what they represent? The C, A, and B matrices don't contain that many values. Also, why does Ht have 36 slices? I would really appreciate an explanation.
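In case it helps anyone answering, this is how I tried to count the parameters from the header comments, but I am not sure my reading is right, which is partly why I am asking (I am assuming k = 3 series and p = q = 1; the 3 x 3 Ht slices are my only basis for that assumption):

% my attempted count, assuming k = 3 and p = q = 1 -- please correct me if this is wrong
k = 3; p = 1; q = 1;
numC = k*(k+1)/2;        % 6 entries for C (via its ivech factor)
numA = p*k^2;            % 9 entries for the innovation matrix A
numB = q*k^2;            % 9 entries for the AR matrix B
numC + numA + numB       % 24 in total, which matches length(parameters) above
% the header also says Ht is k x k x t, so 36 slices would suggest t = 36 observations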