%
% Written by:
% --
% John L. Weatherwax                2007-07-05
%
% email: wax@alum.mit.edu
%
% Please send comments and especially bug reports to the
% above email address.
%
%-----

randn('seed',0); rand('seed',0);

addpath( fullfile(pwd,'AdaBoost') );

% the dimension of the training data:
dim = 10;

[patterns_train,labels_train] = gen_data_pt_b(2000,dim);
% the code below expects class labels of 0's and 1's (and a column):
indsm1 = find( labels_train==-1 ); labels_train( indsm1 ) = 0; labels_train = labels_train(:);

[patterns_test, labels_test ] = gen_data_pt_b(10000,dim);
% the code below expects class labels of 0's and 1's (and a column):
indsm1 = find( labels_test==-1 ); labels_test( indsm1 ) = 0; labels_test = labels_test(:);
clear indsm1;

%%---------------------------------------------------
%FIXED TREE DEPTH, VARYING NUMBER OF WEAK LEARNERS:

%Choose the DEPTH of tree to boost with (1=stumps)
depth = 1;

%Choose the NUMBER of weak learners (trees)
%T_array = [ 1, 10:10:30 ];
%T_array = [ 1, 10:10:400 ];
T_array = [ 1, 10:10:800 ];

err = zeros(length(T_array),2);
tms = zeros(length(T_array),1);
for ii = 1:length(T_array)
  T = T_array(ii);
  fprintf('boosting T=%d trees',T);
  if( ii>1 )
    fprintf('; previous round took=(%10.3f sec,%10.3f min,%10.3f hrs)...\n',tms(ii-1),tms(ii-1)/60,tms(ii-1)/3600);
  else
    fprintf('...\n');
  end
  tic;
  [classes, B_t, e_t] = k_adaboost_training(patterns_train, labels_train, depth, T);

  [final_class_train] = k_adaboost_evaluate(patterns_train, classes, B_t);
  error_train = 1-mean(final_class_train==labels_train);

  [final_class_test] = k_adaboost_evaluate(patterns_test, classes, B_t);
  error_test = 1-mean(final_class_test==labels_test);
  tms(ii) = toc;

  err(ii,1) = error_train;
  err(ii,2) = error_test;
end

h_train = plot(T_array,err(:,1),'-gx'); hold on
h_test = plot(T_array,err(:,2),'-bo');
legend( [ h_train, h_test ], { 'training err', 'testing err' } );
xlabel( 'number of weak learner boosts' ); ylabel( 'classification error' );
axis( [ 0, max(T_array), 0.0, 0.6 ] );

%%---------------------------------------------------
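% Illustrative sketch (an addition, not part of the experiment above):
% one round of discrete AdaBoost worked out by hand on a tiny fixed 1-d
% problem, to show the weighted-error / beta / sample-weight-update steps
% that k_adaboost_training is assumed to carry out internally on each of
% its T rounds.  The toy data, the hard-coded stump, and all *_toy
% variable names below are hypothetical stand-ins for illustration only;
% this is not the author's implementation.

x_toy = [ -2; -1.5; -1; -0.5; -0.3; 0.4; 0.8; 1.2; 1.6; 2.0 ];  % 1-d toy feature
y_toy = [  0;    0;  0;    0;    1;   1;   1;   1;   1;   0 ];  % labels in {0,1}, as above
n_toy = length(y_toy);

w_toy = ones(n_toy,1)/n_toy;        % start with uniform sample weights

h_toy = double( x_toy > 0 );        % a fixed depth-1 stump: predict 1 when x>0

e_1     = sum( w_toy .* (h_toy~=y_toy) );  % weighted training error (=0.2 here)
beta_1  = e_1/(1-e_1);                     % beta_t for this round (=0.25)
alpha_1 = log(1/beta_1);                   % the stump's vote weight (~1.386)

% down-weight the samples the stump classified correctly, then renormalize;
% the two misclassified points end up with weight 0.25 each versus 0.0625
% for each correct point, so the next stump is pushed toward the mistakes.
w_toy = w_toy .* beta_1.^( h_toy==y_toy );
w_toy = w_toy / sum(w_toy);

fprintf('sketch: weighted error=%.3f, beta=%.3f, alpha=%.3f\n', e_1, beta_1, alpha_1);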