%Create a decision tree, test it, and calculate train/test error rates
function [test_labels, train_labels, error, tree] = k_make_tree_script(train_patterns, train_targets, test_patterns, test_targets, depth)

ntest  = length(test_targets);
ntrain = length(train_targets);

%Build tree
tree = k_make_tree( train_patterns, train_targets, depth );

%Find labels (test)
test_labels = k_classify_w_tree( test_patterns, tree );

%Find labels (train)
train_labels = k_classify_w_tree( train_patterns, tree );

%Calculate error rates: error = [train_error, test_error], fractions in [0,1]
check_test = test_labels.' - test_targets;
num_incorrect_test = length(find(check_test ~= 0));

check_train = train_labels.' - train_targets;
num_incorrect_train = length(find(check_train ~= 0));

error(1,1) = num_incorrect_train / ntrain;
error(1,2) = num_incorrect_test / ntest;

end

% %Plot train/test error vs. tree depth
% plot(depths,error(:,1), '-ro', 'MarkerSize', 10 ); hold on;
% plot(depths,error(:,2), '-bx', 'MarkerSize', 10 );
% grid on;
% xlabel( 'tree depth' ); ylabel( 'error rate' );

% %Plot decision boundaries
% % x1 = -2:.01:2; x2 = -2:.01:2;
% x1 = linspace(-2,+2,1500); x2 = linspace(-2,+2,1500);
% % x1 = linspace(-5,+5,1500); x2 = linspace(-5,+5,1500);
% [X1,X2] = meshgrid(x1,x2);
% test_grid_pts = [X1(:) X2(:)];
% grid_labels = k_classify_w_tree( test_grid_pts, tree );
% figure; imagesc(x1,x2,reshape(grid_labels,length(x2),length(x1))); colorbar;
% hold on; axis xy;
%
% %Overlay training points (class 0 as 'x', class 1 as 'o')
% b = find(train_targets==0);
% plot( train_patterns(b,1), train_patterns(b,2), 'wx' );
% g = find(train_targets==1);
% plot( train_patterns(g,1), train_patterns(g,2), 'wo' );
%
% % (or) overlay the test points instead
% b = find(test_targets==0);
% plot( test_patterns(b,1), test_patterns(b,2), 'wx' );
% g = find(test_targets==1);
% plot( test_patterns(g,1), test_patterns(g,2), 'wo' );
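
% A minimal driver sketch for the depth sweep that the error-vs-depth plot
% above assumes (it reads a per-depth error matrix indexed by depths).
% The names depths and all_errors are assumptions, and the data variables
% (train_patterns, train_targets, test_patterns, test_targets) must already
% exist in the workspace. Kept commented so this remains a valid function file.
%
% depths = 1:10;                           % hypothetical range of tree depths
% all_errors = zeros(length(depths), 2);   % row d: [train_error, test_error] at depths(d)
% for d = 1:length(depths)
%     [~, ~, err, ~] = k_make_tree_script( train_patterns, train_targets, ...
%                                           test_patterns, test_targets, depths(d) );
%     all_errors(d,:) = err;
% end
% % all_errors can then be plotted against depths as in the error-vs-depth
% % plot above (with error replaced by all_errors).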