-
Notifications
You must be signed in to change notification settings - Fork 1
/
TwoDtest.m
69 lines (53 loc) · 2.15 KB
/
TwoDtest.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
% TestWaldBoostExample
% Builds a synthetic 2-D two-class data set (two Gaussian clouds) and
% runs the WaldBoost test harness on it.
% load data set
%load alldata features Y
% features=features(700:1300,:);
% Y=Y(700:1300);
% load Features-7000.mat features Y
%features=features(700:1300,:);
%Y=Y(700:1300);
nPos=100; % number of positive samples (class +1)
nNeg=100; % number of negative samples (class -1)
% Positives centered at (1,1) with unit covariance; negatives at (-2,-2)
% with covariance 2*I. Each row of `features` is one 2-D sample.
features = [ mvnrnd([1 1],[1 0; 0 1],nPos); mvnrnd([-2 -2],[2 0; 0 2],nNeg) ];
% Labels: +1 for the first cloud, -1 for the second (same values as the
% original ones(1,100)-2, written explicitly).
Y = [ones(1,nPos) -ones(1,nNeg)];
trainingRate=0.5; % percentage of training part of overall data
testTimes=1; % test times
T=10; % test iteration for every test
% No trailing semicolon: intentionally echoes the result to the console.
[BoostInfomation]=testWaldBoost(features,Y,trainingRate,testTimes,T)
%return;
% Unpack each run's boosting results into parallel cell arrays used by
% the plotting sections below.
nRuns=length(BoostInfomation);
% Preallocate so the cell arrays are not grown inside the loop.
Hypothesis=cell(1,nRuns);
trainError=cell(1,nRuns);
trainOverallError=cell(1,nRuns);
testError=cell(1,nRuns);
testOverallError=cell(1,nRuns);
TP=cell(1,nRuns);
FP=cell(1,nRuns);
costTime=cell(1,nRuns);
for i=1:nRuns
    BoostInfo=BoostInfomation{i};
    Hypothesis{i}=BoostInfo.Hypothesis;
    trainError{i}=BoostInfo.trainError;
    trainOverallError{i}=BoostInfo.trainOverallError;
    testError{i}=BoostInfo.testError;
    testOverallError{i}=BoostInfo.testOverallError;
    TP{i}=BoostInfo.testTPRate;   % true-positive rate on the test set
    FP{i}=BoostInfo.testFPRate;   % false-positive rate on the test set
    costTime{i}=BoostInfo.costTime;
end
% Figure 1002: per-iteration train/test error of the boosted classifier.
figure(1002);hold on,
grid on,
xlabel('training iteration');
% NOTE: strcat trims trailing whitespace from char-array arguments, so the
% original labels rendered run-together (e.g. "testing1times"); sprintf
% preserves the intended spacing.
ylabel(sprintf('Boost classifier error rate ( testing %d times )',testTimes));
testingNum=ceil((1-trainingRate)*size(features,1)); % testing sample size
trainingNum=size(features,1)-testingNum; % training sample size
title(sprintf('Boost classifier error rate ( training %d samples, testing %d samples )',trainingNum,testingNum));
testRange=1:T;
for i=1:testTimes
    plot(testRange,trainError{i}(testRange),'m-');  % training error curve
    plot(testRange,testError{i}(testRange),'c-');   % testing error curve
end
legend('WaldBoost training error','WaldBoost testing error');
% Figure 1003: overall train/test error of the boosted classifier per
% iteration (same layout as figure 1002, different error fields).
figure(1003);hold on,
grid on,
xlabel('training iteration');
% sprintf keeps the intended spacing; strcat would trim trailing blanks
% from the char-array pieces and run the words together.
ylabel(sprintf('Boost classifier error rate ( testing %d times )',testTimes));
testingNum=ceil((1-trainingRate)*size(features,1)); % testing sample size
trainingNum=size(features,1)-testingNum; % training sample size
title(sprintf('Boost Overall classifier error rate ( training %d samples, testing %d samples )',trainingNum,testingNum));
testRange=1:T;
for i=1:testTimes
    plot(testRange,trainOverallError{i}(testRange),'m-'); % overall training error
    plot(testRange,testOverallError{i}(testRange),'c-');  % overall testing error
end
legend('WaldBoost Overall training error','WaldBoost Overall testing error');
return;