-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathmain_DatasetsTraining.m
64 lines (55 loc) · 1.65 KB
/
main_DatasetsTraining.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
% Author: Erik Zamora, [email protected], see License.txt
%
% Trains a Dendrite Morphological Neuron with softmax output by stochastic
% gradient descent on a selected dataset, then reports train/test error
% rates and the number of dendrites (hyperboxes) of the trained model.
clear, close all, clc
addpath(genpath('DMNSoftmax_SGD'))
% HERE YOU CAN CHANGE DATASET
dataset = 'B';
load(['Datasets/', dataset,'.mat']);   % provides P, T, Ptest, Ttest
% Dataset filenames
% A, B, iris, liver, glassC, pageblocks, letterrecognition
% miceprotein, MNIST_1000, CIFAR10_1000
global tol
tol = 1e-10;                 % numerical tolerance shared with the DMN helpers
N = size(P,1);               % input dimensionality
Nc = max(T);                 % number of classes (labels assumed to be 1..Nc)
Qtrain = size(P,2); Qtest = size(Ptest,2);   % training / test sample counts
%% Initialization method
% CHOOSE INITIALIZATION METHOD (uncomment exactly one of the options below)
tic
% 1) Hyperbox per Class
% M = 0.0;
% dendrite = hb_per_class(P,T,M);
% 2) Divide Hyperbox per Class
M = 0.0;                     % hyperbox margin
n = 2;                       % number of hyperbox divisions per class
dendrite = nhb_per_class(P,T,M,n);
% 3) Divide & conquer (You can create new pretraining models with DCtpretraining.m)
% load(['pretrainingD&C/pretrainingD&C_',dataset,'.mat'])
% 4) K-means
% yo = 0.5;
% S = 4;
% dendrite = h_kmeans(P',T',S,yo);
% 5) Random
% dendrite = nhb_per_class_random(P,T,1);
%% Learning by Stochastic Gradient Machine
% Adjustable parameters for learning
alfa = 1e-2;                 % learning rate
Neph = 1000;                 % number of epochs
Qbatch = 10;                 % mini-batch size
cost = zeros(1,Neph);        % preallocate: avoids growing cost() every epoch
for eph = 1:Neph
    % Sample a mini-batch (with replacement) from the training set
    indq = randi(Qtrain,1,Qbatch);
    [cost(eph), grad] = dmnsoftmax_costgrad(dendrite,P(:,indq),T(:,indq));
    % Gradient-descent step on each class dendrite's weights and biases
    for c = 1:Nc
        dendrite(c).W = dendrite(c).W - alfa*grad(c).W;
        dendrite(c).B = dendrite(c).B - alfa*grad(c).B;
    end
    fprintf('Epoch %d / %d, cost = %g\n', eph, Neph, cost(eph));
    if mod(eph,50) == 0
        % Periodically refresh the cost curve; slice to eph so the
        % preallocated zero tail is not drawn
        plot(cost(1:eph),'r'), title('Cost vs Epochs')
        drawnow
    end
end
toc
plot(cost,'r'), title('Cost vs Epochs')
% Classification errors (left unsuppressed on purpose so they echo to the console)
Etrain = dmnsoftmax_errorrate(P,T,dendrite)
Etest = dmnsoftmax_errorrate(Ptest,Ttest,dendrite)
NH = dendritenumber(dendrite)
rmpath(genpath('DMNSoftmax_SGD'))