
Commit 6bd269b

Fix cases where p = 0 but q > 0 (ardim = 0 with madim > 0, i.e. a pure moving-average model).
Note that prediction is not meaningful when p = 0, since there is no autoregressive component to iterate forward.

Parent: 370efb9
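
As a point of reference, a minimal usage sketch of the case this commit fixes, assembled from the option fields that appear in the diffs below (demo.m and rarma.m); the data shape and parameter values are illustrative assumptions, not part of the commit:

    % Fit a pure moving-average model: p = ardim = 0, q = madim > 0.
    X = randn(3, 100);        % assumed xdim x T data matrix, X = [X1, ..., XT]
    opts = [];
    opts.ardim = 0;           % no autoregressive component
    opts.madim = 2;           % moving-average order q = 2
    opts.reg_wgt_ma = 0.01;   % regularization weight, as in demo.m
    model = rarma(X, opts);
    % With ardim = 0, model.A is zeros(3, 0), so demo.m now skips
    % RarmaUtilities.checkStable and the prediction/evaluation loop.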

3 files changed: +42, -34 lines


RarmaFcns.m (+11, -15)
@@ -30,10 +30,14 @@
     % return;
     % end

-    Xhist = RarmaFcns.generate_history(Xminusone, ardim);
-    result = A*Xhist;
-    result = [zeros(xdim, ardim) result];
-    if numsamples < size(result,2), result = result(:, end-numsamples+1:end); end
+    if ardim > 0
+      Xhist = RarmaFcns.generate_history(Xminusone, ardim);
+      result = A*Xhist;
+      result = [zeros(xdim, ardim) result];
+      if numsamples < size(result,2), result = result(:, end-numsamples+1:end); end
+    else
+      result = zeros(xdim, numsamples);
+    end
   end

   function result = computeMA(B, Epsilon, madim, xdim, numsamples)
@@ -42,15 +46,7 @@
     % Does not use Z^(j)_{:, T-madim+j+1:T}
     result = zeros(xdim, numsamples);
     T = size(B,2);
-    % % JF: not working. This is a bug I never noticed, because at a
-    % time, we decided that the prediction (in
-    % convex_multi_view_model.m, line 300) should not use the "noise"
-    % for future prediction.
-    % for j = 1:madim
-    %   idx = RarmaFcns.blockInd(j,xdim);
-    %   Tidx = T:-1:max(T-numsamples+1,j);
-    %   result(:,Tidx) = result(:,Tidx) + B(idx, Tidx-j+1);
-    % end
+
     for j = 1:madim
       idx = RarmaFcns.blockInd(j,xdim);
       Tidx = numsamples:-1:max(1,numsamples-T+j);
@@ -115,8 +111,8 @@
     % Epsilonstart can either be Zstart or the epsilon; this is
     % determined by the size of the matrix

-    if isempty(Epsilonstart)
-      Xiterated = iterate_predict_ar(Xstart, model, horizon, opts);
+    if isempty(Epsilonstart)
+      Xiterated = RarmaFcns.iterate_predict_ar(Xstart, model, horizon, opts);
       return;
     end
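
For intuition on the computeAR guard above: the autoregressive term is the sum over lags i = 1..ardim of A_i * x_{t-i}, so when ardim = 0 the sum is empty and the contribution is exactly zero, which the new else branch returns. The helper-free sketch below mirrors that logic; the lag ordering assumed inside RarmaFcns.generate_history is a guess, and this is not the repository's implementation:

    function result = ar_term_sketch(A, Xminusone, ardim, xdim, numsamples)
    % Sketch only: A = [A_1 ... A_ardim] is xdim x (xdim*ardim) and the
    % columns of Xminusone are consecutive time steps.
      if ardim > 0
        T = size(Xminusone, 2);
        result = zeros(xdim, T);             % first ardim columns stay zero
        for t = ardim+1:T
          hist = reshape(Xminusone(:, t-1:-1:t-ardim), [], 1);  % assumed lag order
          result(:, t) = A * hist;
        end
        if numsamples < size(result,2), result = result(:, end-numsamples+1:end); end
      else
        result = zeros(xdim, numsamples);    % ardim = 0: empty sum, zero AR term
      end
    end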

demo.m (+8, -6)
@@ -3,17 +3,15 @@

 rng(10);
 num_reps = 10;
-Models = cell(num_reps,1);
-Xpredictall = cell(num_reps,1);
-isStable = zeros(num_reps,1);
-Err = zeros(num_reps,1);

 %% Generate data
 opts = [];
 opts.num_reps = num_reps;
 [Xstartall,Xtrainall,Xtestall] = genARMA(opts);

 %% Learn RARMA
+Models = cell(num_reps,1);
+isStable = zeros(num_reps,1);
 % In practice, the following paramters should be
 % cross-validated for EACH dataset before applied
 opts = [];
@@ -23,10 +21,14 @@
 opts.reg_wgt_ma = 0.01;
 for ii = 1:num_reps
   Models{ii} = rarma(Xtrainall{ii},opts);
-  isStable(ii) = RarmaUtilities.checkStable(Models{ii}.A);
+  if opts.ardim > 0
+    isStable(ii) = RarmaUtilities.checkStable(Models{ii}.A);
+  end
 end

-%% Prediction and Evaluation
+%% Prediction and Evaluation (only when ardim > 0)
+Xpredictall = cell(num_reps,1);
+Err = zeros(num_reps,1);
 for ii = 1:num_reps
   Xpredictall{ii} = Models{ii}.predict(Xtrainall{ii},...
     size(Xtestall{ii},2), opts);
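
A short note on the demo changes above: with ardim = 0, rarma.m sets model.A = zeros(sizeA) where sizeA = [xdim, xdim*opts.ardim], i.e. an empty xdim-by-0 matrix, so there is no autoregressive dynamics matrix for RarmaUtilities.checkStable to test or for predict to roll forward; hence the new ardim > 0 guards. For instance:

    xdim = 3; ardim = 0;
    A = zeros(xdim, xdim*ardim);   % what model.A looks like when ardim = 0
    size(A)                        % ans = [3 0]: nothing to check or iterate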

rarma.m (+23, -13)
@@ -14,7 +14,7 @@
 %
 % Authors: Junfeng Wen (University of Alberta)
 %          Martha White (Indiana University)
-% Released: March 2015
+% Last Update: Nov 2015

 if nargin < 1
   error('rarma requires at least data matrix X = [X1, ..., XT]');
@@ -46,6 +46,11 @@
   opts = RarmaUtilities.getOptions(opts, DEFAULTS);
 end

+% Validity check
+if opts.madim < 0 || opts.ardim < 0
+  error('Incorrect parameters');
+end
+
 % Solve for RARMA, AR or MA dependning on madim and ardim choices
 ar_solver = @solve_rarma;
 if opts.madim < 1
@@ -64,8 +69,13 @@
 % Initialize variables for learning
 [xdim, numsamples] = size(X);
 sizeA = [xdim, xdim*opts.ardim];
-Ainit = initAParams(sizeA);
 sizeZ = [xdim*opts.madim, numsamples];
+if opts.ardim > 0
+  Ainit = initAParams();
+end
+if opts.madim > 0
+  Zinit = zeros(sizeZ);
+end

 % START the outer optimization; save any learned variables in model along the way
 model = [];
@@ -90,32 +100,32 @@
   % Note: the code is currently not well-designed to do long-horizon prediction
   % with only a moving average models, since future moving average
   % components are not imputed; should work, however, for 1-step prediction
-  [Z, obj, iter, msg] = opts.optimizer(@(Zvec)(objZ(Zvec, zeros(sizeA))), zeros(sizeZ));
+  [Z, obj, iter, msg] = opts.optimizer(@(Zvec)(objZ(Zvec, zeros(sizeA))), Zinit(:));
   model.Z = reshape(Z, sizeZ);
   model.A = zeros(sizeA);
   if opts.recover == 1
     [model.B, model.Epsilon] = recoverModels(Z);
-  end
-  model.predict = @(Xstart, horizon, ...
-    opts)(RarmaFcns.iterate_predict(Xstart, [], model, horizon, opts));
+  end
+  % Cannot really 'predict' without autoregressive component
+  model.predict = @(Xstart, horizon, opts)...
+    (RarmaFcns.iterate_predict(Xstart, [], model, horizon, opts));
 end


 function [model,obj] = solve_rarma()
 %% SOLVE_RARMA
 % Solve for A first (with Z = 0), then iterate between Z and A
-  Z = zeros(sizeZ);
-  [A, obj, iter, msg] =opts.optimizer(@(Avec)(objA(Avec, X, Z)), Ainit(:));
+  [A, obj, iter, msg] = opts.optimizer(@(Avec)(objA(Avec, X, Zinit)), Ainit(:));
   A = reshape(A, sizeA);
-  [Z, prev_obj] = iterateZ(Z,A,opts.init_stepsize);
+  [Z, prev_obj] = iterateZ(Zinit, A, opts.init_stepsize);

   for i = 1:opts.maxiter
     % Do A first since it returns the incorrect obj
-    A = iterateA(A,Z,opts.init_stepsize/i); % adaptive stepsize
-    [Z, obj] = iterateZ(Z,A,opts.init_stepsize/i);
+    A = iterateA(A, Z, opts.init_stepsize/i); % adaptive stepsize
+    [Z, obj] = iterateZ(Z, A, opts.init_stepsize/i);

     if abs(prev_obj-obj) < opts.TOL % doing minimization
-      break;
+      break;
     end
     prev_obj = obj;
   end
@@ -189,7 +199,7 @@
   f = f + opts.reg_wgt_ma * f2;
 end

-function Aparams = initAParams(sizeA)
+function Aparams = initAParams()
 % INITAPARAMS
 % Could initialize in many ways; for speed, we choose
 % a regression between X = AXhist
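
The comments kept in the diff above ("Cannot really 'predict' without autoregressive component", and the note that future moving-average components are not imputed) reflect a standard time-series fact; a small self-contained illustration with made-up coefficients:

    % Simulate a univariate MA(2): x_t = e_t + b1*e_{t-1} + b2*e_{t-2}.
    q = 2; T = 50;
    b = [0.6, -0.3];                 % illustrative MA coefficients
    e = randn(1, T + q);             % white-noise innovations
    x = e(q+1:end);
    for j = 1:q
      x = x + b(j) * e(q+1-j:end-j);
    end
    % Future noise terms are unknown with mean zero, so the best mean-square
    % forecast of x_{T+h} uses only the last estimated residuals for h <= q
    % and is exactly zero for every h > q.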
