%% TrainDNN
% Trains a BiLSTM or LSTM deep neural network for the selected subcarrier
% using pre-generated training/validation data (64 subcarriers, 4 pilots,
% CP length 16). Produces smoothed accuracy/loss curves and (optionally)
% saves the trained network.
%
% Requires: Deep Learning Toolbox (trainNetwork, lstmLayer, ...);
% smooth() is from the Curve Fitting Toolbox.
% The loaded .mat files must define: XTrain, YTrain, XValid, YValid,
% NumOFDMsym, NumSC, Label.

%% Clear workspace
clear variables;
close all;

%% Load training and validation data
load('TrainingData_sc64_pil4_cp16.mat');
load('ValidationData_sc64_pil4_cp16.mat');

%% Define training parameters
MiniBatchSize  = 1000;
MaxEpochs      = 1000;
% Each input sequence element stacks real and imaginary parts, hence the
% factor of 2.
InputSize      = 2*NumOFDMsym*NumSC;
NumHiddenUnits = 16;
NumClass       = length(Label);

%% Form BiLSTM or LSTM DNN layers
% Swap lstmLayer for the commented bilstmLayer to train a BiLSTM instead.
% sseClassificationLayer is a custom output layer (sum-of-squared-errors);
% alternatives: classificationLayer, maeClassificationLayer('mae').
Layers = [ ...
    sequenceInputLayer(InputSize)
    lstmLayer(NumHiddenUnits,'OutputMode','last')
    % bilstmLayer(NumHiddenUnits,'OutputMode','last')
    fullyConnectedLayer(NumClass)
    softmaxLayer
    sseClassificationLayer('sse')];

%% Define training options
Options = trainingOptions('adam',...
    'InitialLearnRate',0.01,...
    'ValidationData',{XValid,YValid}, ...
    'ExecutionEnvironment','auto', ...
    'GradientThreshold',1, ...  % clip gradients to stabilize LSTM training
    'LearnRateDropFactor',0.1,...
    'MaxEpochs',MaxEpochs, ...
    'MiniBatchSize',MiniBatchSize, ...
    'Shuffle','every-epoch', ...
    'Verbose',0,...
    'Plots','training-progress');

%% Train DNN
[Net,tr] = trainNetwork(XTrain,YTrain,Layers,Options);
LOSS     = tr.TrainingLoss;
Accuracy = tr.TrainingAccuracy;
Rate     = tr.BaseLearnRate;  % NOTE(review): field name varies by release — verify against tr

% Re-enable the standard menu bar on the training-progress figure.
% findall can return multiple figure handles, so use set() rather than
% dot-assignment (which errors on a handle array).
h = findall(groot,'Type','Figure');
set(h,'MenuBar','figure');

% Plot the smoothed accuracy and loss curves in separate figures.
figure(1)
plot(smooth(Accuracy),'LineWidth', 2)
xlabel('Iterations'), ylabel('Accuracy (%)')
title('Accuracy of LSTM@4-pilots using adam & SSE')
legend('Accuracy of LSTM@4-pilots using adam & SSE')
grid on

figure(2)
plot(smooth(LOSS),'LineWidth', 2)
xlabel('Iterations'), ylabel('Loss')
title('Loss of LSTM@4-pilots using adam & SSE')
legend('Loss of LSTM@4-pilots using adam & SSE')
grid on

%% Save the DNN
% save('TrainedLSTM_sc64_pil4_cp16_adam_SSE','Net','MiniBatchSize','LOSS','Accuracy');