clear all
disp('start....')
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%% We create an arbitrary CNN and train it on the raw EEG data.
%%%%% The architecture is chosen arbitrarily for experimental purposes
%%%%% and may be revised for better results.
%
% cnnAddConvLayer - add a convolution layer
%   cnnAddConvLayer(cnn, no_of_feature_maps, kernel_size, activation)
%   Activation: 'sigm' for sigmoid, 'tanh' for tanh, 'rect' for ReLU,
%   'soft' for softmax, 'none' for none, 'plus' for softplus.
% cnnAddPoolLayer - add a pooling layer
%   cnnAddPoolLayer(cnn, subsampling_factor, subsampling_type)
%   Presently only 'mean' subsampling is implemented.
% cnnAddFCLayer - add a fully connected neural network layer
%   cnnAddFCLayer(cnn, no_of_nodes, activation)
%%
% Predicting on raw signals of EEG data ................
% inputdata and outputrain3hour are expected in the workspace,
% e.g. loaded from the .mat file below.
% load('trainedClassifier_guassiansvm.mat', 'class');
% load('trainedClassifier_guassiansvm.mat', 'features');
% load('C:\Users\d035190\Documents\MATLAB\EEG_sanam\EEG_sanam\EEGeye1.mat')

% zero-mean the raw signals (the original line used an undefined variable 'xx')
inputdata = inputdata - mean(inputdata(:));

% train/test split on the raw signals
inputtrain  = inputdata(1:120000,:);
inputtest   = inputdata(120001:end,:);
outputtrain = outputrain3hour(1:120000,1);
outputtest  = outputrain3hour(120001:end,1);

train_x = inputtrain';
train_y = outputtrain';
test_x  = inputtest';
test_y  = outputtest';

% train_x = x(1:14,1:14000);
% train_y = x(15,1:14000);
% test_x  = x(1:14,14001:end);
% test_y  = x(15,14001:end);

% initialize cnn
cnn.namaste = 1;            % just initiating the cnn object

% [x,t] = cancer_dataset;
% train_x = x(:,1:500);
% train_y = t(:,1:500);
% test_x  = x(:,500:end);
% test_y  = t(:,500:end);

h = size(train_x,1);
w = 1;

% initializing the cnn with the input layer .........
cnn = initcnn(cnn, [h w]);

% .... adding one convolution layer
cnn = cnnAddConvLayer(cnn, 3, [3 1], 'rect');
% adding a pooling layer .......................
cnn = cnnAddPoolLayer(cnn, 1, 'mean');
% cnn = cnnAddConvLayer(cnn, 10, [3 1], 'sigm');
cnn = cnnAddPoolLayer(cnn, 1, 'mean');
% cnn = cnnAddConvLayer(cnn, 12, [9 1], 'sigm');
% cnn = cnnAddPoolLayer(cnn, 1, 'mean');

% adding fully connected layers .........
cnn = cnnAddFCLayer(cnn, 5, 'tanh');   % add fully connected layer
cnn = cnnAddFCLayer(cnn, 1, 'none');   % add fully connected layer
% last layer: number of nodes = number of labels
%%
% Below are other example CNN architectures that can be used.
%%% Example 1
% cnn = cnnAddConvLayer(cnn, 10, [7 7], 'tanh');
% cnn = cnnAddPoolLayer(cnn, 2, 'mean');   % cnn, subsampling factor
% cnn = cnnAddConvLayer(cnn, 15, [3 3], 'tanh');
% cnn = cnnAddPoolLayer(cnn, 3, 'mean');
% cnn = cnnAddFCLayer(cnn, 150, 'tanh');   % add fully connected layer
% cnn = cnnAddFCLayer(cnn, 10, 'sigm');    % add fully connected layer
%
%%% Example 2
% cnn = cnnAddConvLayer(cnn, 40, [5 5], 'sigm');
% cnn = cnnAddPoolLayer(cnn, 3, 'mean');
% cnn = cnnAddConvLayer(cnn, 50, [3 3], 'sigm');
% cnn = cnnAddPoolLayer(cnn, 2, 'mean');
% cnn = cnnAddFCLayer(cnn, 90, 'sigm');    % add fully connected layer
% cnn = cnnAddFCLayer(cnn, 10, 'sigm');    % add fully connected layer
%
%%% Example 3
% cnn = cnnAddConvLayer(cnn, 10, [9 1], 'sigm');
% cnn = cnnAddPoolLayer(cnn, 2, 'mean');
% cnn = cnnAddConvLayer(cnn, 20, [3 3], 'tanh');
% % cnn = cnnAddPoolLayer(cnn, 2, 'mean');
% cnn = cnnAddFCLayer(cnn, 150, 'tanh');   % add fully connected layer
% cnn = cnnAddFCLayer(cnn, 10, 'sigm');    % add fully connected layer
% last layer: number of nodes = number of labels
%%
%%% more parameters
% cnn.loss_func = 'cros';
cnn.loss_func = 'quad';
no_of_epochs = 550;
batch_size = 5;

disp('training started... Wait for ~200 seconds...')
tic
cnn = traincnn(cnn, train_x, train_y, no_of_epochs, batch_size);
toc
disp('...training finished.')

disp('testing started....')
tic
err = testcnn(cnn, test_x, test_y);
toc
disp('... testing finished. To reduce the error, increase the number of epochs during training.')
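
%% Example 4 (optional): cross-entropy loss with a sigmoid output
% A minimal sketch, not part of the run above. It reuses only the toolbox
% calls already used in this script (initcnn, cnnAddConvLayer,
% cnnAddPoolLayer, cnnAddFCLayer, traincnn, testcnn) and assumes that the
% 'cros' loss option is meant to be paired with a 'sigm' output layer for a
% 0/1 label; that pairing is an assumption, not confirmed toolbox behaviour.
% cnn2.namaste = 1;
% cnn2 = initcnn(cnn2, [size(train_x,1) 1]);
% cnn2 = cnnAddConvLayer(cnn2, 3, [3 1], 'rect');
% cnn2 = cnnAddPoolLayer(cnn2, 1, 'mean');
% cnn2 = cnnAddFCLayer(cnn2, 5, 'tanh');
% cnn2 = cnnAddFCLayer(cnn2, 1, 'sigm');   % sigmoid output for a binary label
% cnn2.loss_func = 'cros';                 % cross-entropy instead of quadratic
% cnn2 = traincnn(cnn2, train_x, train_y, no_of_epochs, batch_size);
% err2 = testcnn(cnn2, test_x, test_y);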