% Train a CNN classifier, holding out the Zemris set for testing, then
% fine-tune the trained network with ADAM at a small learning rate.
clear
close all

% Zemris.mat provides trainimages/trainlabels; those become the TEST set.
% LPALIC_Without_Zemris.mat then overwrites trainimages/trainlabels with
% the actual training set (everything except Zemris).
load('Zemris.mat')
testimages = trainimages;
testlabels = trainlabels;
load('LPALIC_Without_Zemris.mat')

%% Define the Network Layers
% 28x28 grayscale input; spatial sizes after each stage noted at right.
layers = [
    imageInputLayer([28 28 1],'Normalization','none','Name','input')
    convolution2dLayer(5,64,'Name','conv1')
    batchNormalizationLayer('Name','BN1')
    reluLayer('Name','relu1')                        % 24x24
    convolution2dLayer(5,128,'Name','conv2')
    batchNormalizationLayer('Name','BN2')
    reluLayer('Name','relu2')                        % 20x20
    maxPooling2dLayer(2,'Stride',2,'Name','max1')    % 10x10
    convolution2dLayer(5,176,'Name','conv3')
    batchNormalizationLayer('Name','BN3')
    reluLayer('Name','relu3')                        % 6x6
    convolution2dLayer(5,208,'Name','conv4')
    batchNormalizationLayer('Name','BN4')
    reluLayer('Name','relu4')                        % 2x2
    maxPooling2dLayer(2,'Stride',2,'Name','max2')    % 1x1
    fullyConnectedLayer(35,'Name','fc')              % 35 classes
    softmaxLayer('Name','softmax')
    classificationLayer('Name','classOutput')];

%% Specify the Training Options
options = trainingOptions('sgdm','LearnRateSchedule','piecewise',...
    'MiniBatchSize',120,'LearnRateDropFactor',0.7,...
    'LearnRateDropPeriod',2,...
    'MaxEpochs',10, ...
    'InitialLearnRate',1/40,...
    'Shuffle','every-epoch');

% Initial training pass with SGDM.
convnet = trainNetwork(trainimages,trainlabels,layers,options);
% load('Zemris_result_9790.mat')

% Evaluate on the held-out Zemris set; no semicolon so Error prints (%).
YTest = classify(convnet,testimages);
accuracy = sum(YTest == testlabels)/numel(testlabels);
Error = (1-accuracy)*100

%% Fine tune with ADAM
% BUG FIX: the original did `lgraph = convnet;` and then called
% trainNetwork(trainimages, lgraph, options). trainNetwork does not accept
% a trained SeriesNetwork object as its layer argument, and the labels
% argument was missing (for numeric image arrays the signature is
% trainNetwork(X, Y, layers, options)). Extract the trained layer array —
% its learned weights serve as the warm-start initialization — and pass
% the labels explicitly.
lgraph = convnet.Layers;
BZ = 240;            % fine-tuning mini-batch size
LR = 0.006/BZ;       % learning rate scaled by batch size
options = trainingOptions('adam','LearnRateSchedule','piecewise',...
    'MiniBatchSize',BZ,'LearnRateDropFactor',0.95,...
    'Verbose',true,...
    'LearnRateDropPeriod',1,...
    'MaxEpochs',1, ...
    'InitialLearnRate',LR,...
    'Shuffle','every-epoch');
convnet = trainNetwork(trainimages,trainlabels,lgraph,options);

% Re-evaluate after fine-tuning; Error prints the final test error (%).
YTest = classify(convnet,testimages);
accuracy = sum(YTest == testlabels)/numel(testlabels);
Error = (1-accuracy)*100