% BoostLogPost23.m 2004/6/12
%
% This program reads DataGen23.mat and makes figures with classified test pixels
%
% 1. Calculate posterior prob. of all pixels based on Gaussian distribution
% with common variance-covariance matrix
% 2. Define posterior prob.: p = exp(F(x,y))/sum(F(x,1:g)) based on the classifier F
% 3. Find F maximizing sum of the log posteriors log p_1 + ... + log p_n
% 4. Tune the coefficients for the sum of log posteriors
% based on ring neighborhoods with radisu r
% 5. Derive a new classifier by combining the classifiers
%
% Input image: DataGen23.mat
%
% Initial weight for training data
% uniform = 1: weight 1/gg for all pixels (gg : sample size of categories)
% uniform = 0: weight 1/n_g for all pixels (n_g: number of g-th categories)
% uniform = 0: カテゴリ g の点に 1/n_g
format compact; clear; close all; diary off;
global bb gg cat lab n mis correct onen
global NBDi NBDj NBDsize ssi ssj len
delete BoostLogPost23.dia; diary BoostLogPost23.dia;
% range of squared radius r2
nbdr2set = [1 2 4 5 8 9 10 13 16 17 18 20 25 26 29 32 34 36 37 40 45 49 50];
lennbd = length(nbdr2set);
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% 教師データ, テストデータの読み込み
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
load DataGen23.mat
bb = 4; gg = 3; cat = zeros(1, gg); pp = bb;
col = [ 1 0 0 ; 1 1 1; 0 1 0; 0 0 1; 1 1 0; 0 1 1 ; 1 0 1; ...
1/2 0 0; 0 1/2 0; 0 0 1/2; 1/2 1/2 0; 0 1/2 1/2; 1/2 0 1/2];
mono = [ 0 0 0 ; 1 1 1; 1/2 1/2 1/2];
test = TestImage(:, :, :, 1); % feature vector at test pixel (i,j)
train = TestImage(:, :, :, 2); % feature vector at training pixel (i,j)
datatrain = reshape(train, lenv*lenh, bb); % feature matrix based on test pixels
% which are linearly numbered
datatest = reshape(test , lenv*lenh, bb); % feature matrix based on training pixels
% which are linearly numbered
clear TestImage
Ztest = Ztest( 1:lenv, 1:lenh); % matrix with training labels
Ztrain = Ztest; % = matrix with test labels
for g = 1:gg
cat(g) = length(find(Ztrain(:) == g));
end % g
n = sum(cat);
fprintf('Sample sizes of categories of test data = ( %s) , Total sample size = %d\n', ...
num2str(cat), n)
onen = ones( n, 1) ;
lab = zeros(n, gg); % lab : label matrix
tlab = zeros(n, 1); % tlab: true label matrix
for g = 1:gg
tmp = find( Ztrain(:) == g ); lab(tmp, g) = 1;
tlab = tlab + g * lab(:, g);
end % g
for uniform = 1:1
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Initial weight for training data
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
if uniform == 1
weight0 = ones(n, gg) / gg;
tmp = find(Ztrain(:) > 0); weight0 = weight0(tmp, :); clear tmp lab
% Skip data with label 0
fprintf('\n\n*** Initial weight for training data = 1/gg = uniform ***\n')
else
% weight0 = zeros(n, gg);
% for y = 1:gg
% for ye = [1:y-1 y+1:gg]
% weight0(find(Ztrain(:) == y), ye) = 1/(gg*(gg-1)*cat(y));
% end
% end
fprintf('*** Initial weight for training data = 1/n_g ***\n')
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Define LDF and classify traing and test data
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
mu = zeros(bb, gg); C = zeros(bb);
for g = 1:gg
tmp = find( Ztrain(:) == g );
mu(:, g) = ( mean( datatrain(tmp, :) ) )';
C = C + (length(tmp)-1) * cov( datatrain(tmp, :) );
end % g
C = C / n; Ci = inv(C);
fprintf('Estimated mean vectors based on training data\n')
mu
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% LDFtrainPost: n x gg, LDFtestPost: n x gg poterior prob.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
LDFtrain = tlab; LDFtest = tlab; tmp1 = zeros(gg, 1); tmp2 = zeros(gg, 1);
LDFtrainPost = zeros(gg, n); LDFtestPost = zeros(gg, n);
for s = 1:n
for g = 1:gg
tmp1(g) = (datatrain(s, :) - mu(:, g)') * Ci * (datatrain(s, :) - mu(:, g)')';
tmp2(g) = (datatest( s, :) - mu(:, g)') * Ci * (datatest( s, :) - mu(:, g)')';
end
[dmy, LDFtrain(s)] = min(tmp1); [dmy, LDFtest(s)] = min(tmp2); % classify pixel s
tmp1 = exp( -tmp1/2 ); tmp2 = exp( -tmp2/2 );
LDFtrainPost(:, s) = log( tmp1 / sum( tmp1 ) ); % log posteriors
LDFtestPost( :, s) = log( tmp2 / sum( tmp2 ) ); % log posteriors
end % s % Misclassification rates for training and test data due to LDF
LDFtrainPost = LDFtrainPost'; LDFtestPost = LDFtestPost';
betaNBD = zeros(lennbd+1, 1);% Coefficient for posteriors of neighborhodds
% Store misclassification rates due to LDF and LDF with neighbors
errtrain = zeros(lennbd+1, 1); errtrain(1) = length(find(tlab ~= LDFtrain))/n; errtest = zeros(lennbd+1, 1); errtest( 1) = length(find(tlab ~= LDFtest ))/n;
FUSIONtrain = zeros(n, 1); FUSIONtest = zeros(n, 1);
tmp1 = LDFtrainPost; tmp2 = LDFtestPost;
for s = 1:n
[dmy, FUSIONtrain(s)] = max(tmp1(s, :));
[dmy, FUSIONtest(s) ] = max(tmp2(s, :));
end % s
fprintf('\nLDF classification, radius^2 of NBD = 0, Training error = %f, Test error = %f\n', ...
errtrain(1), errtest(1) )
%%%%%%%%%%%%%%%%%%%% Make figures of classified test data %%%%%%%%%%%%%%%%%%%%
CC = reshape(FUSIONtest, [lenv, lenh]); nbdr2 = 0;
fname = ['testCOLORr2_', num2str(nbdr2), '.fig'];
figure, imshow(CC, colormap(col));
title('Test Data 20'); saveas(gcf, fname)
fname = ['testMONOr2_', num2str(nbdr2), '.fig'];
figure, imshow(CC, colormap(mono));
title('Test Data 20'); saveas(gcf, fname)
fname = ['testMONOepsr2_', num2str(nbdr2), '.eps'];
figure, imshow(CC, colormap(mono)); print(gcf, '-deps', fname)
pause(0.1); close all
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Calculation of coefficient beta and new weight matrix with initial weight weight0
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
[betaLDF, weightLDF] = OptBetaLogPost(tlab, LDFtrainPost', weight0);
fprintf('coefficeint for LDF = %f\n\n', betaLDF)
betaNBD(1) = betaLDF; % Coefficient for LDF
weightNBD = weightLDF; % weight matrix is iteritiely renewed by LDF due to traing data
LDFtrainPost2dim = reshape(LDFtrainPost', [gg, lenv, lenh]);% gg x lenv x lenh
LDFtestPost2dim = reshape(LDFtestPost' , [gg, lenv, lenh]);
tmp1 = betaLDF*LDFtrainPost; % classifier initial = LDF
tmp2 = betaLDF*LDFtestPost; % classifiers with larger radius
countnbd = 1;
for nbdr2 = nbdr2set;
tic
fprintf('ring neighborhood，radius^2 = %d, initial weights for training data uniform = %d\n', ...
nbdr2, uniform)
%%%%% Derive neighborhood with radius^2 from nbdr2-1 to nbdr2 %%%%%
NBDcircle(reshape(tlab, [lenv, lenh]), 'LDFtrainNBD', nbdr2);
fprintf('\nCalculation of coefficient for posteriors in ring neighborhood\n')
countnbd = countnbd + 1;
% Calculate posteriros of center pixel based on neighbors
NBDtrainPost = zeros(gg, n); NBDtestPost = zeros(gg, n);
for s = 1:len
sumposttrain = zeros(gg, 1); sumposttest = zeros(gg, 1);
for k = 1:NBDsize(s)
sumposttrain = sumposttrain + LDFtrainPost2dim(:, NBDi(k, s), NBDj(k, s) );
sumposttest = sumposttest + LDFtestPost2dim( :, NBDi(k, s), NBDj(k, s) );
end % k
NBDtrainPost(:, s) = sumposttrain/NBDsize(s); % averaged log posteriors
NBDtestPost( :, s) = sumposttest /NBDsize(s);
end % s
NBDtrainPost = NBDtrainPost'; NBDtestPost = NBDtestPost';
[betaNBD(countnbd), weightNBD] = OptBetaLogPost(tlab, NBDtrainPost', weightLDF);
tmp1 = tmp1 + betaNBD(countnbd) * NBDtrainPost;
tmp2 = tmp2 + betaNBD(countnbd) * NBDtestPost;
for s = 1:n
[dmy, FUSIONtrain(s)] = max(tmp1(s, :));
[dmy, FUSIONtest( s)] = max(tmp2(s, :));
end % s
errtrain(countnbd) = length(find(tlab ~= FUSIONtrain))/n;
errtest( countnbd) = length(find(tlab ~= FUSIONtest ))/n;
fprintf('Fusion classification, radius^2 of NBD = %d, Training error = %f, Test error = %f\n\n', ...
nbdr2, errtrain(countnbd) , errtest(countnbd) )
%%%%%%%%%%%%%%%%%%%% Make figures of classified test data %%%%%%%%%%%%%%%%%%%%
CC = reshape(FUSIONtest, [lenv, lenh]);
fname = ['testCOLORr2_', num2str(nbdr2), '.fig'];
figure, imshow(CC, colormap(col));
title('Test Data 20'); saveas(gcf, fname)
fname = ['testMONOr2_', num2str(nbdr2), '.fig'];
figure, imshow(CC, colormap(mono));
title('Test Data 20'); saveas(gcf, fname)
fname = ['testMONOepsr2_', num2str(nbdr2), '.eps'];
figure, imshow(CC, colormap(mono)); print(gcf, '-deps', fname)
pause(0.1); close all
toc
end % nbdr2
end % uniform
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Draw graphs of coefficients, training error rates and test error rates
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
figure, plot([0 nbdr2set], betaNBD')
title('coefficients for LDF and neighborhoods')
saveas(gcf, 'beta.fig')
pause(1)
figure, plot([0 nbdr2set], errtrain)
title('training error, uniform = 1')
saveas(gcf, 'errtrain.fig')
pause(1)
figure, plot([0 nbdr2set], errtest)
title('test error, uniform = 1')
saveas(gcf, 'errtest.fig')
pause(1)
%save RealboostNBD
diary off