-
Notifications
You must be signed in to change notification settings - Fork 199
/
Copy pathBayesLinReg.m
36 lines (36 loc) · 1.13 KB
/
BayesLinReg.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
function [m S alpha beta marglik]=BayesLinReg(y,sumPhi,sumyPhi,opts)
%BAYESLINREG Bayesian Linear Regression training using basis functions phi(x)
% [m S alpha beta marglik]=BayesLinReg(y,sumPhi,sumyPhi,opts)
%
% Evidence-framework (type-II maximum likelihood) training: alternately
% computes the Gaussian weight posterior N(m,S) and re-estimates the
% hyperparameters alpha, beta by EM-style fixed-point updates until the
% log marginal likelihood converges.
%
% Inputs:
% y : training outputs (N x 1)
% sumPhi  : sum_n phi(x^n)phi(x^n)'  (B x B)
% sumyPhi : sum_n y^n phi(x^n)       (B x 1)
% opts.maxits       : maximum number of iterations
% opts.tol          : convergence tolerance on the log marginal likelihood
% opts.plotprogress : if true, plot the log marginal likelihood each iteration
%
% Outputs:
% m : posterior mean weight (B x 1)
% S : posterior covariance matrix (B x B)
% alpha : learned weight precision hyperparameter
% beta  : learned noise precision hyperparameter
% marglik : log marginal likelihood log p(data|learned hyperparameters)
B=length(sumyPhi); N=length(y);
sumyy=sum(y.^2); % sum_n (y^n)^2
beta=1; alpha=1; % initial guess for the hyperparameters
for loop=1:opts.maxits
	invS=alpha*eye(B)+beta*sumPhi; % posterior precision A = alpha*I + beta*sumPhi
	S=inv(invS);                   % posterior covariance
	m=beta*S*sumyPhi;              % posterior mean
	% Log marginal likelihood for the CURRENT alpha,beta (the ones that produced m,S):
	% 2*log p(y|alpha,beta) = -beta*sumyy + d'*S*d + log det(S)
	%                         + B*log(alpha) + N*log(beta) - N*log(2*pi),  d = beta*sumyPhi
	% (d'*S*d = m'*invS*m; the Gaussian integral contributes +log det(S), not -log det(S).)
	d=beta*sumyPhi;
	logdetS=-2*sum(log(diag(chol(invS)))); % log det(S) = -log det(invS), via Cholesky for numerical stability
	logpDgG(loop)=0.5*(-beta*sumyy+d'*(invS\d)+logdetS+B*log(alpha)+N*log(beta)-N*log(2*pi));
	% EM fixed-point updates of the hyperparameters:
	% 1/beta = ( ||y - Phi*m||^2 + trace(S*sumPhi) )/N ; 1/alpha = ( m'*m + trace(S) )/B
	beta=N/(sumyy-2*m'*sumyPhi+m'*sumPhi*m+trace(S*sumPhi));
	alpha=B/(trace(S)+m'*m);
	if loop>1
		if abs(logpDgG(loop)-logpDgG(loop-1))<opts.tol; break; end
	end
	if opts.plotprogress; plot(logpDgG); drawnow; end
end
marglik=logpDgG(end);