%
% Written by:
% --
% John L. Weatherwax                2007-07-01
%
% email: wax@alum.mit.edu
%
% Please send comments and especially bug reports to the
% above email address.
%
% Problem: Epage 71
%
% Compares the empirical error rate of the k-nearest-neighbor rule
% (k = 1..11) against the optimal Bayes error rate for two Gaussian
% classes with equal spherical covariance.
%
%-----

close all; clc; clear;

% Fix both legacy generators so the experiment is reproducible.
% (Legacy 'seed' syntax kept for compatibility with the era of this code;
%  modern MATLAB would use rng(0).)
rand('seed',0); randn('seed',0);

nTraining = 50;  % samples drawn per class for the training set
nTesting  = 50;  % samples drawn per class for the test set

% Generate training data like for Problem 2.12:
% two Gaussian classes, means mu1/mu2, common covariance sigmasSquared*I.
%
mu1 = [ 1, 1 ].';
mu2 = [ 1.5, 1.5 ].';
sigmasSquared = 0.2;
d = size(mu1,1);  % dimensionality of the feature space

nFeats = nTraining;
X1 = mvnrnd( mu1, sigmasSquared*eye(d), nFeats );
X2 = mvnrnd( mu2, sigmasSquared*eye(d), nFeats );

if( 0 )  % optional scatter plot of the training data (disabled)
  h1 = plot( X1(:,1), X1(:,2), '.b' ); hold on;
  h2 = plot( X2(:,1), X2(:,2), '.r' ); hold on;
  legend( [h1,h2], {'class 1', 'class 2'} );
end

X_train = [ X1; X2 ];
labels_train = [ ones(nFeats,1); 2*ones(nFeats,1) ];

% Generate nTesting new points from each class to classify:
% (an earlier version of this comment said 100; nTesting above is 50)
%
nFeats = nTesting;
X1 = mvnrnd( mu1, sigmasSquared*eye(d), nFeats );
X2 = mvnrnd( mu2, sigmasSquared*eye(d), nFeats );

X_test = [ X1; X2 ];
labels_test = [ ones(nFeats,1); 2*ones(nFeats,1) ];

% Classify each of the vectors in X_test using the k-NN rule for
% k = 1, 2, ..., 11 and report the empirical probability of error for each k.
%
addpath('../../../Duda_Hart_Stork/BookSupplements/ClassificationToolbox/Src');
for nni = 1:11
  test_targets = Nearest_Neighbor( X_train.', labels_train, X_test.', nni);
  P_NN_error = sum( test_targets(:) ~= labels_test(:) )/length(test_targets);
  fprintf('P_e %2dNN= %10.6f; \n',nni,P_NN_error);
end

% Calculate the optimal Bayes error rate (using the results from Problem~2.9):
%
% For two equal-prior Gaussians with common covariance, P_B = 1 - Phi(dm/2),
% where dm is the Mahalanobis distance between the class means.
% NOTE(review): this is only correct if mahalanobis() returns the distance
% itself; if it returns the SQUARED distance a sqrt() is needed here --
% confirm against the helper's implementation.
addpath('../../../Duda_Hart_Stork/Code/Chapter2/ComputerExercises');
dm = mahalanobis(mu1,mu2,sigmasSquared*eye(d));
P_B = 1 - normcdf( 0.5 * dm );
fprintf('P_B= %10.6f\n',P_B);