function [LowestError] = code(path,file,numExecution,numIterations,generations,populationSize,crossProb,mutationProb)

global data
global classLabels

[data,text] = xlsread(file);

% Use the presence column as class labels
classLabels = data(:,2);
data(:,1:4) = [];

% Normalize every feature to [-1,1]
data = mapminmax(data')';
text = text(1,7:end);

[rowsData,colsData] = size(data);
accumulatedResults = zeros(1,colsData);
GenomeLength = colsData; % Number of features in the dataset (chromosome length)
nVars = colsData;
FitnessFcn = @FitFunc_ELM_CV;

% Values for the genetic search parameters during experimentation:
%   Number of generations  {3, 5, 10, 15, 20}
%   Population size        {10, 15, 20, 30}
%   Crossover probability  {0.3, 0.6, 0.75, 0.9}
%   Mutation probability   {0.033, 0.06, 0.1}
options = gaoptimset('CreationFcn',{@PopFunction},...
    'PopulationSize',populationSize,...
    'Generations',generations,...
    'PopulationType','bitstring',...
    'SelectionFcn',{@selectiontournament,10},...
    'MutationFcn',{@mutationuniform,mutationProb},...
    'CrossoverFraction',crossProb,... % documented GA option for the crossover probability
    'CrossoverFcn',@crossoverarithmetic,... % note: arithmetic crossover can produce non-binary genes on a bitstring population
    'EliteCount',8,...
    'StallGenLimit',100,...
    'PlotFcns',{},... % @gaplotbestf
    'Display','off');

% Preallocate the per-iteration result arrays
ErrorAveragePopulation = zeros(3,numIterations);
ErrorBestIndividuals = zeros(1,numIterations);
BestIndividual = zeros(colsData,numIterations);

for i = 1:numIterations
    disp(['Iteration number: ',num2str(i)]);
    [chromosome,fval,exitflag,output,population,scores] = ga(FitnessFcn,nVars,options);
    accumulatedResults = accumulatedResults + chromosome;
    ErrorAveragePopulation(1,i) = i;
    ErrorAveragePopulation(2,i) = mean(scores);
    ErrorAveragePopulation(3,i) = std(scores);
    ErrorBestIndividuals(1,i) = min(scores);
    BestIndividual(:,i) = chromosome';
end

LowestError = min(ErrorBestIndividuals);

filename = [path,'\salida_Execution',num2str(numExecution),'.xlsx'];
text1 = {'Error Best Individual';'# Iterations';'# Generations';'Population Size';'Crossover Probability';'Mutation Probability'};
num1 = [LowestError;numIterations;generations;populationSize;crossProb;mutationProb];
output1 = [text1, num2cell(num1)];

% Per-feature selection frequency, as a percentage of the GA iterations
output2 = [text', num2cell(accumulatedResults*100/numIterations)'];

num3 = [ErrorAveragePopulation; ErrorBestIndividuals; BestIndividual; sum(BestIndividual)];
[rowsNum3,colsNum3] = size(num3);
text3 = cell(rowsNum3,1);
text3{1,1} = '# Iteration';
text3{2,1} = 'Mean Error';
text3{3,1} = 'Std Deviation Error';
text3{4,1} = 'Error Best Individual';
text3{5,1} = 'Best Individual';
output3 = [text3, num2cell(num3)];

xlswrite(filename,output3,'Iterations');
xlswrite(filename,output2,'Feature Selection');
xlswrite(filename,output1,'Hoja1');
end
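%%% EXAMPLE INVOCATION
% A minimal sketch of how code() might be called; the output path and
% workbook name are hypothetical placeholders, and the GA settings are one
% combination from the grids listed above. Note that the inputs 'path' and
% 'file' (and the local variable 'text') shadow MATLAB built-ins of the
% same name inside the function.
%
%   LowestError = code('C:\results','dataset.xlsx',1,10,10,20,0.75,0.06);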
%%% POPULATION FUNCTION
function [pop] = PopFunction(GenomeLength,~,options)
% Random initial population; the random threshold RD varies the sparsity
% of the generated bitstrings
RD = rand;
pop = (rand(options.PopulationSize,GenomeLength) > RD);
end

%%% FITNESS FUNCTION
function [MisclassificationRate] = FitFunc_ELM_CV(pop)
global data
global classLabels
% Keep only the features selected by the chromosome
indexes = logical(pop);
if ~any(indexes)
    % An all-zero chromosome selects no features; return the worst error
    MisclassificationRate = 1;
    return;
end
T = data(:,indexes);
% 10-fold cross-validated misclassification rate of the ELM
cp = cvpartition(classLabels,'k',10);
MisclassificationRate = crossval('mcr',T,classLabels,'Predfun',@ELM_FS,'partition',cp);
end

% Alternative predictor based on MATLAB's built-in classify:
% function [TY] = ELM_FS(XTRAIN,ytrain,XTEST)
%     TY = classify(XTEST,XTRAIN,ytrain);
% end

function [Y_class] = ELM_FS(XTRAIN,ytrain,XTEST)
% (Header retained from the original elm_train implementation that this
% function is adapted from.)
%
% Usage: elm_train(TrainingData_File, Elm_Type, NumberofHiddenNeurons, ActivationFunction)
% OR:    [TrainingTime, TrainingAccuracy] = elm_train(TrainingData_File, Elm_Type, NumberofHiddenNeurons, ActivationFunction)
%
% Input:
% TrainingData_File     - Filename of training data set
% Elm_Type              - 0 for regression; 1 for (both binary and multi-class) classification
% NumberofHiddenNeurons - Number of hidden neurons assigned to the ELM
% ActivationFunction    - Type of activation function:
%                         'sig' for Sigmoidal function
%                         'sin' for Sine function
%                         'hardlim' for Hardlim function
%
% Output:
% TrainingTime          - Time (seconds) spent on training ELM
% TrainingAccuracy      - Training accuracy:
%                         RMSE for regression, or correct classification rate for classification
%
% MULTI-CLASS CLASSIFICATION: NUMBER OF OUTPUT NEURONS WILL BE AUTOMATICALLY SET EQUAL TO NUMBER OF CLASSES
% FOR EXAMPLE, if there are 7 classes in all, there will be 7 output
% neurons; neuron 5 having the highest output means the input belongs to the 5-th class
%
% Sample1 regression:     [TrainingTime, TrainingAccuracy, TestingAccuracy] = elm_train('sinc_train', 0, 20, 'sig')
% Sample2 classification: elm_train('diabetes_train', 1, 20, 'sig')
%
%%%% Authors: MR QIN-YU ZHU AND DR GUANG-BIN HUANG
%%%% NANYANG TECHNOLOGICAL UNIVERSITY, SINGAPORE
%%%% EMAIL: EGBHUANG@NTU.EDU.SG; GBHUANG@IEEE.ORG
%%%% WEBSITE: http://www.ntu.edu.sg/eee/icis/cv/egbhuang.htm
%%%% DATE: APRIL 2004

%%%%%%%%%%% Macro definition
REGRESSION = 0;
CLASSIFIER = 1;

Elm_Type = 1;
NumberofHiddenNeurons = 200;
ActivationFunction = 'hardlim'; % 'sig','sin','hardlim'

%%%%%%%%%%% Load training data
T = double(ytrain)';
P = XTRAIN';
NumberofTrainingData = size(P,2);
NumberofInputNeurons = size(P,1);

if Elm_Type ~= REGRESSION
    %%%%%%%%%%%% Preprocessing the data for classification
    sorted_target = sort(T,2);
    label = zeros(1,1); % Find and save in 'label' the class labels appearing in the training data
    label(1,1) = sorted_target(1,1);
    j = 1;
    for i = 2:NumberofTrainingData
        if sorted_target(1,i) ~= label(1,j)
            j = j+1;
            label(1,j) = sorted_target(1,i);
        end
    end
    number_class = j;
    NumberofOutputNeurons = number_class;
    if NumberofOutputNeurons ~= 2
        error('There are more than 2 output neurons');
    end

    %%%%%%%%%% Processing the targets of training: one output neuron per
    %%%%%%%%%% class, coded +1 for the true class and -1 otherwise
    temp_T = zeros(NumberofOutputNeurons,NumberofTrainingData);
    for i = 1:NumberofTrainingData
        for j = 1:number_class
            if label(1,j) == T(1,i)
                break;
            end
        end
        temp_T(j,i) = 1;
    end
    T = temp_T*2 - 1;
end % end if of Elm_Type

%%%%%%%%%%% Calculate weights & biases
start_time_train = cputime;

%%%%%%%%%%% Randomly generate input weights InputWeight (w_i) and biases BiasofHiddenNeurons (b_i) of hidden neurons
InputWeight = rand(NumberofHiddenNeurons,NumberofInputNeurons)*2 - 1;
BiasofHiddenNeurons = rand(NumberofHiddenNeurons,1);
tempH = InputWeight*P;
clear P; % Release input of training data
ind = ones(1,NumberofTrainingData);
BiasMatrix = BiasofHiddenNeurons(:,ind); % Extend the bias vector BiasofHiddenNeurons to match the dimensions of H
tempH = tempH + BiasMatrix;

%%%%%%%%%%% Calculate hidden neuron output matrix H
switch lower(ActivationFunction)
    case {'sig','sigmoid'}
        %%%%%%%% Sigmoid
        H = 1 ./ (1 + exp(-tempH));
    case {'sin','sine'}
        %%%%%%%% Sine
        H = sin(tempH);
    case {'hardlim'}
        %%%%%%%% Hard Limit
        H = hardlim(tempH);
    %%%%%%%% More activation functions can be added here
end
clear tempH; % Release the temporary array used to build H

%%%%%%%%%%% Calculate output weights OutputWeight (beta_i)
OutputWeight = pinv(H') * T'; % implementation without regularization factor //refer to 2006 Neurocomputing paper
% OutputWeight = inv(eye(size(H,1))/C + H*H') * H*T'; % faster method 1 //refer to 2012 IEEE TSMC-B paper;
%                                                     % one can set the regularization factor C properly in classification applications
% OutputWeight = (eye(size(H,1))/C + H*H') \ (H*T');  % faster method 2 //refer to 2012 IEEE TSMC-B paper;
%                                                     % one can set the regularization factor C properly in classification applications
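% Dimension check for the training step above: H is
% (NumberofHiddenNeurons x NumberofTrainingData), so pinv(H') * T' yields
% an OutputWeight of size (NumberofHiddenNeurons x NumberofOutputNeurons),
% the minimum-norm least-squares solution of H' * OutputWeight = T';
% no iterative tuning of the hidden layer is needed. To use either of the
% commented regularized forms, a value for C must be defined first (e.g.
% C = 1, a hypothetical setting; C is not set anywhere in this file).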
end_time_train = cputime;
TrainingTime = end_time_train - start_time_train; % CPU time (seconds) spent training the ELM

%%%%%%%%%%% Calculate the training accuracy
Y = (H' * OutputWeight)'; % Y: the actual output of the training data

% if Elm_Type == REGRESSION
%     TrainingAccuracy = sqrt(mse(T - Y)) % Training accuracy (RMSE) for the regression case
%     output = Y;
% end
% clear H;
%
% if Elm_Type == CLASSIFIER
%     %%%%%%%%%% Calculate training classification accuracy
%     MisclassificationRate_Training = 0;
%     for i = 1:size(T,2)
%         [~,label_index_expected] = max(T(:,i));
%         [~,label_index_actual] = max(Y(:,i));
%         output(i) = label(label_index_actual);
%         if label_index_actual ~= label_index_expected
%             MisclassificationRate_Training = MisclassificationRate_Training + 1;
%         end
%     end
%     TrainingAccuracy = 1 - MisclassificationRate_Training/NumberofTrainingData
% end

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% PREDICTION
TV.P = XTEST';
% TV.T = test_data(:,1)';
% TV.P = test_data(:,2:size(test_data,2))';
NumberofTestingData = size(TV.P,2);

% if Elm_Type ~= REGRESSION
%     %%%%%%%%%% Processing the targets of testing
%     temp_TV_T = zeros(NumberofOutputNeurons,NumberofTestingData);
%     for i = 1:NumberofTestingData
%         for j = 1:size(label,2)
%             if label(1,j) == TV.T(1,i)
%                 break;
%             end
%         end
%         temp_TV_T(j,i) = 1;
%     end
%     TV.T = temp_TV_T*2 - 1;
% end % end if of Elm_Type

%%%%%%%%%%% Calculate the output of the testing input
start_time_test = cputime;
tempH_test = InputWeight*TV.P;
ind = ones(1,NumberofTestingData);
BiasMatrix = BiasofHiddenNeurons(:,ind); % Extend the bias vector BiasofHiddenNeurons to match the dimensions of H
tempH_test = tempH_test + BiasMatrix;
switch lower(ActivationFunction)
    case {'sig','sigmoid'}
        %%%%%%%% Sigmoid
        H_test = 1 ./ (1 + exp(-tempH_test));
    case {'sin','sine'}
        %%%%%%%% Sine
        H_test = sin(tempH_test);
    case {'hardlim'}
        %%%%%%%% Hard Limit
        H_test = hardlim(tempH_test);
    %%%%%%%% More activation functions can be added here
end
TY = (H_test' * OutputWeight)'; % TY: the actual output of the testing data
end_time_test = cputime;
TestingTime = end_time_test - start_time_test; % CPU time (seconds) spent predicting the whole testing set

if Elm_Type == CLASSIFIER
    %%%%%%%%%% Map the network outputs back to class labels; the true test
    %%%%%%%%%% labels are not available inside a crossval 'Predfun', so no
    %%%%%%%%%% testing accuracy is computed here
    output = zeros(1,NumberofTestingData);
    for i = 1:NumberofTestingData
        [~,label_index_actual] = max(TY(:,i));
        output(i) = label(label_index_actual);
    end
end

% NOTE: the shift by one below reproduces the original pipeline and assumes
% a specific numeric coding of the class labels in the dataset
Y_class = (output - 1)';
end
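%%% EXAMPLE: EVALUATING A CANDIDATE FEATURE MASK DIRECTLY
% A minimal sketch (the mask below is hypothetical); it assumes the globals
% 'data' and 'classLabels' have already been populated as in code(), and
% that FitFunc_ELM_CV is reachable from the caller (e.g. placed in its own
% file on the MATLAB path rather than kept as a local function here):
%
%   global data classLabels
%   mask = true(1,size(data,2));      % keep every feature
%   cvError = FitFunc_ELM_CV(mask);   % 10-fold CV misclassification rate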