Commit 9b5277d0fc8fd37fe774fdb2847c208650cbc2e4

  • avatar
  • Kimmo Riihiaho <kimmo.riihiaho @gm…l.com> (Committer)
  • Sat Apr 21 20:32:23 EEST 2018
  • avatar
  • Kimmo Riihiaho <kimmo.riihiaho @gm…l.com> (Author)
  • Sat Apr 21 20:32:23 EEST 2018
testing classifiers
DataMiningProject/USPSreadwrite.m
(7 / 7)
  
3030
3131%%% To write images on disk %%%
3232
33 %j=3;
33 j=3;
3434
35 %for k=1:size(digits(j).dig,3)
35 for k=1:size(digits(j).dig,3)
3636
37 %t1=digits(j).dig(:,:,k);
38 %filename = (['usps_two_',num2str(k),'.tif']);
39 %imwrite( t1, filename );
37 t1=digits(j).dig(:,:,k);
38 filename = (['usps_two_',num2str(k),'.tif']);
39 imwrite( t1, filename );
4040
41 % end
41 end
4242
43% keyboard
43% keyboard
DataMiningProject/t1.m
(37 / 0)
  
% t1.m — quick experiment: fit a decision tree and a kNN classifier on the
% USPS handwritten-digit data and report their misclassification losses.
clear       % 'clear' rather than 'clear all': clearing all also flushes
            % compiled functions/caches and is discouraged in the MATLAB docs
close all
clc


load USPS % you will get two variables 'fea' and 'gnd'

% Class labels: gnd is shifted by -1 and converted to character labels
% (assumes gnd holds 1..10 so labels become '0'..'9' — TODO confirm).
% NOTE(review): the original comment claimed a 9298-by-257 table X whose
% column 257 holds the class, but the code never appends the class column —
% X is the 256-column feature matrix only, and the labels live in Y.
Y = num2str(gnd - 1);
X = fea;

%[COEFF, SCORE] = pca(X);

% 10-fold cross-validated classification tree and its CV loss.
tree = fitctree(fea,Y,'KFold', 10);
treeLoss = kfoldLoss(tree)              % unsuppressed: prints CV loss
% A second tree trained on ALL data, used for a single prediction below
% (the cross-validated model cannot be used with predict directly).
tree2 = fitctree(fea,Y);
treePrediction = predict(tree2, X(1,:)) % class of the first observation

% numBranches = @(x)sum(x.IsBranch);
% mdlDefaultNumSplits = cellfun(numBranches, tree.Trained);

%figure;
%histogram(mdlDefaultNumSplits);

% view(tree.Trained{1},'Mode','graph')


% k-nearest-neighbour classifier with k = 10.
knn = fitcknn(fea, Y, 'NumNeighbors',10);

rloss = resubLoss(knn)    % resubstitution (training-set) loss
cvKnn = crossval(knn);
% Renamed from 'rloss': the original clobbered the resubstitution loss,
% making the two losses impossible to compare afterwards.
cvloss = kfoldLoss(cvKnn) % cross-validated loss

jotai = predict(knn, X(1,:)) % kNN prediction for the first observation