% Train and evaluate a CNN on the LPALIC Latin character set (35 classes).
% Loads 28x28x1 grayscale images from LPALIC_Latin_ManSplit.mat, holds out a
% random 20% of the training set for testing, trains with SGDM, and reports
% the test error percentage.
clear
close all

load('LPALIC_Latin_ManSplit.mat')   % provides trainimages (28x28x1xN) and trainlabels (Nx1 categorical)

%% Hold out 20% of the training data as a test set
n = length(trainimages);
k = ceil(0.2*length(trainimages));  % test-set size: 20% of samples, rounded up
rng('shuffle');                     % seed from clock so each run gets a different split
ind = randperm(n,k);                % indices of the held-out test samples
% NOTE(review): the test slice fixes the channel dim to 1 while the train
% slice below uses ':' — equivalent only for single-channel data; confirm.
testimages = trainimages(:,:,1,ind);
testlabels = trainlabels(ind);
indx = 1:n;
indx(ind) = [];                     % remaining indices form the training subset
trainimage = trainimages(:,:,:,indx);  % (singular name = the reduced training set)
trainlabels(ind) = [];              % must come AFTER testlabels is extracted above

%% Define the Network Layers
% Spatial-size comments track the 28x28 input through each unpadded 5x5 conv
% (-4 per conv) and each 2x2/stride-2 max pool (halves the size).
layers = [
    imageInputLayer([28 28 1],'Normalization','none','Name','input')
    convolution2dLayer(5,64,'Name','conv1')
    batchNormalizationLayer('Name','BN1')
    reluLayer('Name','relu1')                   % 24x24
    convolution2dLayer(5,128,'Name','conv2')
    batchNormalizationLayer('Name','BN2')
    reluLayer('Name','relu2')                   % 20x20
    maxPooling2dLayer(2,'Stride',2,'Name','max1')  % 10x10
    convolution2dLayer(5,176,'Name','conv3')
    batchNormalizationLayer('Name','BN3')
    reluLayer('Name','relu3')                   % 6x6
    convolution2dLayer(5,208,'Name','conv4')
    batchNormalizationLayer('Name','BN4')
    reluLayer('Name','relu4')                   % 2x2
    maxPooling2dLayer(2,'Stride',2,'Name','max2')  % 1x1
    fullyConnectedLayer(35,'Name','fc')         % 35 output classes
    softmaxLayer('Name','softmax')
    classificationLayer('Name','classOutput')];

%% Specify the Training Options
% Stochastic gradient descent with momentum; 15 epochs (an epoch is a full
% pass over the training data). The initial learning rate is 0.1 and decays
% by a factor of 0.7 every 2 epochs (piecewise schedule).
options = trainingOptions('sgdm','LearnRateSchedule','piecewise',...
    'MiniBatchSize',120,'LearnRateDropFactor',0.7,...
    'LearnRateDropPeriod',2,...
    'MaxEpochs',15, ...
    'InitialLearnRate',1/10,...
    'Shuffle','every-epoch');

%% Train and evaluate
convnet = trainNetwork(trainimage,trainlabels,layers,options);

YTest = classify(convnet,testimages);
accuracy = sum(YTest == testlabels)/numel(testlabels);
Error = (1-accuracy)*100            % test error, percent (intentionally unsuppressed)

% Optional: training-set error (uncomment to check for over/underfitting)
% YTrainTest = classify(convnet,trainimage);
% Taccuracy = sum(YTrainTest == trainlabels)/numel(trainlabels);
% TError = (1-Taccuracy)*100