Skip to content

Commit

Permalink
more rls things
Browse files Browse the repository at this point in the history
  • Loading branch information
Jakob Voigts committed Jan 5, 2013
1 parent 303e300 commit 1389401
Show file tree
Hide file tree
Showing 7 changed files with 77 additions and 0 deletions.
8 changes: 8 additions & 0 deletions rls/KernelMatrix.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
function K = KernelMatrix(X,kernelfun);
% KernelMatrix  Gram matrix of the rows of X under the supplied kernel.
%   K(r,c) = kernelfun(X(r,:), X(c,:)) for an N-by-d data matrix X,
%   so K is N-by-N. kernelfun is a function handle taking two row vectors.
nPoints = size(X,1);
K = zeros(nPoints);
for row = 1:nPoints
    for col = 1:nPoints
        K(row,col) = kernelfun(X(row,:), X(col,:));
    end;
end;
9 changes: 9 additions & 0 deletions rls/kernel_gauss.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
function d = kernel_gauss(a,b,sigma);
% kernel_gauss  Gaussian (RBF) kernel exp(-||a-b||^2 / sigma^2).
%
% takes:
%   a      either a single 1-by-d row vector, or an N-by-d matrix of rows
%   b      a single 1-by-d row vector
%   sigma  (optional) bandwidth; defaults to 500000, preserving the
%          previously hard-coded value, so existing two-argument calls
%          behave identically
%
% returns:
%   d      scalar kernel value (N==1), or an N-by-1 column of kernel
%          values between each row of a and b (N>1)
if nargin < 3
    sigma = 500000;
end

N=size(a,1);
if N==1
    % single pair: plain squared distance
    d=exp(-( (sum((a-b).^2)) ./ (sigma^2) ));
else
    % vectorized: squared distance of every row of a to b
    d=exp(-( sum( ( a-repmat(b,[N 1]))'.^2 ) ./ (sigma^2) ))';
end;
2 changes: 2 additions & 0 deletions rls/kernel_lin.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
function d = kernel_lin(a,b);
% kernel_lin  Linear kernel: inner product of a and b, computed as a*b'.
%   For row vectors this is a scalar; for an N-by-d matrix a and a
%   1-by-d row vector b it yields an N-by-1 column of inner products.
innerProd = a * b';
d = innerProd;
3 changes: 3 additions & 0 deletions rls/kernel_poly.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
function d = kernel_poly(a,b,deg);
% kernel_poly  Inhomogeneous polynomial kernel (a*b' + 1)^deg.
%   Fixed: the declared name was kernel_gauss (copy-paste error); the file
%   is kernel_poly.m and MATLAB dispatches by filename, so callers are
%   unaffected by this rename.
%
% takes:
%   a, b  row vectors (or a may be N-by-d for a column of kernel values)
%   deg   (optional) polynomial degree; defaults to 5, preserving the
%         previously hard-coded value
%
% returns:
%   d     kernel value(s)
if nargin < 3
    deg = 5;
end
d=(a*b'+1).^deg;
10 changes: 10 additions & 0 deletions rls/rlsAssignkernelfun.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
% rlsAssignkernelfun  (script) Map the kernel name in the workspace
% variable whichKernel ('linear' | 'gaussian' | 'polynomial') onto a
% function handle in kernelfun; errors on any other value.
if strcmp(whichKernel,'linear')
    kernelfun = @kernel_lin;
elseif strcmp(whichKernel,'gaussian')
    kernelfun = @kernel_gauss;
elseif strcmp(whichKernel,'polynomial')
    kernelfun = @kernel_poly;
else
    error('specified kernel not found');
end
8 changes: 8 additions & 0 deletions rls/rlsKernelMatrix.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
function K = rlsKernelMatrix(X,kernelfun);
% rlsKernelMatrix  N-by-N kernel matrix over the elements of X.
%   Fixed: the declared function name was KernelMatrix, which mismatched
%   the filename rlsKernelMatrix.m and clashed with the sibling file
%   KernelMatrix.m. MATLAB dispatches by filename, so callers see no change.
%
%   NOTE(review): this variant uses numel(X) and scalar indexing X(i),
%   i.e. it assumes X is a vector of scalar samples — unlike KernelMatrix.m,
%   which indexes rows of a matrix. Confirm against callers.
N= numel(X);
K=zeros(N);
for i=1:N
    for j=1:N
        % NOTE(review): argument order (X(j),X(i)) fills K(i,j) with
        % kernel(x_j,x_i); harmless for symmetric kernels, but the order is
        % transposed relative to KernelMatrix.m — confirm intent.
        K(i,j)=kernelfun(X(j),X(i));
    end;
end;
37 changes: 37 additions & 0 deletions rls/rlsLOO.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
function [looe,coeffs] = rlsLOO(Ytrain,Ktrain,lambdas)
%efficient leave one out error for a set of lambdas
% function [looe,coeffs] = rlsLOO(Ytrain,Ktrain,lambdas)
%
% returns:
% looe vector of loo errors, one per supplied lambda
% coeffs optimal RLS coefficients, one N-by-1 column per lambda
%
% takes:
% Ytrain training labels (N-by-1)
% Ktrain Kernel Matrix of training set (N-by-N, symmetric)
% lambdas set of regularization parameters

%% eigendecompose Ktrain once, so that for every lambda
%% (Ktrain + lambda*I)^-1 = Q*diag(1./(diag(v)+lambda))*Q'
[Q,v]=eig(Ktrain);
N=size(Ktrain,2);

% preallocate outputs (fixed: previously grown inside the loop)
nLambdas = length(lambdas);
looe   = zeros(1,nLambdas);
coeffs = zeros(N,nLambdas);

%% run different lambdas:
for l=1:nLambdas

    % get coeffs: c = (Ktrain + lambda*I)^-1 * Ytrain
    Ginv = Q*diag(1./(diag(v)+lambdas(l)))*Q';
    coeffs(:,l)=Ginv*Ytrain;

    % compute leave-one-out error via the closed form sum_i (c_i/Ginv_ii)^2
    looe(l)= sum( ( coeffs(:,l)./diag(Ginv) ).^2 );

    %{
    %sanitycheck: brute-force LOO predictions, should match looe(l)
    KGinv=Ktrain*Ginv;
    KGinvy=KGinv*Ytrain;
    for i=1:N
    y(i) = (KGinvy(i)-KGinv(i,i)*Ytrain(i))./(1-KGinv(i,i));
    end;
    looe(l)= sum( ( Ytrain-y' ).^2 );
    %}
end;

0 comments on commit 1389401

Please sign in to comment.