%% CEEMDAN-SO-BiLSTM time-series forecasting script.
% Loads CEEMDAN decomposition results from Excel, builds a sliding-window
% supervised dataset, tunes BiLSTM hyper-parameters (initial learning rate,
% hidden units, L2 coefficient) with the SO optimizer, trains the network,
% and reports RMSE / R2 / MAE with comparison plots.
% External dependencies: 'Ceemdan decomposition results.xlsx', SO.m,
% CostFunction.m, Deep Learning Toolbox.
clear
clc

%% Data import
result = xlsread('Ceemdan decomposition results.xlsx');

%% Parameter setting
num_samples = length(result);   % number of raw samples in the series
kim = 30;                       % time lag (sliding-window length)
zim = 1;                        % forecast step length

%% Dataset construction (sliding window: kim inputs -> 1 output)
num_windows = num_samples - kim - zim + 1;
res = zeros(num_windows, kim + 1);   % preallocate (original grew res in-loop)
for i = 1 : num_windows
    res(i, :) = [reshape(result(i: i + kim - 1), 1, kim), result(i + kim + zim - 1)];
end

outdim = 1;                          % the last column is the output
num_size = 0.7;                      % proportion of training set to dataset
% FIX: the split must be based on the number of windowed samples (rows of
% res), not on the raw series length num_samples — otherwise the ratio is
% not 0.7 and the test set is smaller than intended.
num_train_s = round(num_size * size(res, 1));   % number of training samples
f_ = size(res, 2) - outdim;                     % input feature dimension

%% Divide dataset into the training set and test set
P_train = res(1: num_train_s, 1: f_)';
T_train = res(1: num_train_s, f_ + 1: end)';
M = size(P_train, 2);

P_test = res(num_train_s + 1: end, 1: f_)';
T_test = res(num_train_s + 1: end, f_ + 1: end)';
N = size(P_test, 2);

%% Data normalization (min-max to [0, 1]; statistics fitted on training data)
[p_train, ps_input] = mapminmax(P_train, 0, 1);
p_test = mapminmax('apply', P_test, ps_input);

[t_train, ps_output] = mapminmax(T_train, 0, 1);
t_test = mapminmax('apply', T_test, ps_output);

%% Format conversion to cell arrays of sequences (one cell per sample)
vp_train = cell(M, 1);
vt_train = cell(M, 1);
for i = 1 : M
    vp_train{i, 1} = p_train(:, i);
    vt_train{i, 1} = t_train(:, i);
end

vp_test = cell(N, 1);
vt_test = cell(N, 1);
for i = 1 : N
    vp_test{i, 1} = p_test(:, i);
    vt_test{i, 1} = t_test(:, i);
end

%% Create an optimization function
ObjFcn = @CostFunction; %#ok<NASGU> % kept for compatibility
fobj = @CostFunction;

%% Optimize algorithm parameter settings
SearchAgents_no = 25;     % population size
Max_iteration = 20;       % maximum number of iterations
dim = 3;                  % number of parameters to optimize
lb = [1e-4, 5, 1e-6];     % lower bounds: [learn rate, hidden units, L2]
ub = [1e-2, 50, 1e-1];    % upper bounds

[Best_pos, Best_score, curve] = SO(SearchAgents_no, Max_iteration, lb, ub, dim, fobj);

%% The optimal parameters
InitialLearnRate = Best_pos(1, 1);   % optimal initial learning rate
% FIX: bilstmLayer requires a positive integer unit count, but SO searches
% a continuous space — round the optimized value.
NumOfUnits = round(Best_pos(1, 2))   % hidden layer nodes (echoed deliberately)
L2Regularization = Best_pos(1, 3)    % optimal L2 coefficient (echoed deliberately)

%% Create the network
layers = [ ...
    sequenceInputLayer(f_)          % input layer
    bilstmLayer(NumOfUnits)         % BiLSTM layer
    reluLayer                       % ReLU activation layer
    fullyConnectedLayer(outdim)     % fully connected output
    regressionLayer];               % regression layer

%% Hyper-parameter settings
% NOTE(review): LearnRateDropPeriod (850) exceeds MaxEpochs (600), so the
% piecewise drop never triggers; kept as-is to preserve the schedule.
options = trainingOptions('adam', ...                 % Adam optimizer
    'MaxEpochs', 600, ...                             % maximum training epochs
    'GradientThreshold', 1, ...                       % gradient clipping threshold
    'InitialLearnRate', InitialLearnRate, ...         % initial learning rate
    'LearnRateSchedule', 'piecewise', ...             % learning-rate schedule
    'LearnRateDropPeriod', 850, ...
    'LearnRateDropFactor', 0.2, ...                   % learning-rate drop factor
    'L2Regularization', L2Regularization, ...         % regularization coefficient
    'ExecutionEnvironment', 'cpu', ...
    'Verbose', 0, ...
    'Plots', 'none');

%% Training
net = trainNetwork(vp_train, vt_train, layers, options);

%% Forecasting
t_sim1 = predict(net, vp_train);
t_sim2 = predict(net, vp_test);

%% Data anti-normalization (back to the original scale)
T_sim1 = mapminmax('reverse', t_sim1, ps_output);
T_sim2 = mapminmax('reverse', t_sim2, ps_output);

%% Format conversion (cell -> numeric matrix)
T_sim1 = cell2mat(T_sim1);
T_sim2 = cell2mat(T_sim2);

%% RMSE
error1 = sqrt(sum((T_sim1' - T_train).^2) ./ M);
error2 = sqrt(sum((T_sim2' - T_test ).^2) ./ N);

%% Plotting
figure
plot(1: M, T_train, 'r-*', 1: M, T_sim1, 'b-o', 'LineWidth', 1)
legend('true value ', ' predicted value ')
xlabel('Prediction sample')
ylabel('Predicted results ')
string = {'Comparison of training set prediction results'; ['RMSE=' num2str(error1)]};
title(string)
xlim([1, M])
grid

figure
plot(1: N, T_test, 'r-*', 1: N, T_sim2, 'b-o', 'LineWidth', 1)
legend('true value ', ' predicted value ')
xlabel('Prediction sample')
ylabel('Predicted results ')
% FIX: this figure shows the TEST set; the title previously said "training set".
string = {'Comparison of test set prediction results'; ['RMSE=' num2str(error2)]};
title(string)
xlim([1, N])
grid

%% R2
R1 = 1 - norm(T_train - T_sim1')^2 / norm(T_train - mean(T_train))^2;
R2 = 1 - norm(T_test - T_sim2')^2 / norm(T_test - mean(T_test ))^2;
disp(['The R2 of the training set data is:', num2str(R1)])
disp(['The R2 of the testing set data is:', num2str(R2)])

%% MAE
mae1 = sum(abs(T_sim1' - T_train)) ./ M;
mae2 = sum(abs(T_sim2' - T_test )) ./ N;
disp(['The MAE of the training set data is:', num2str(mae1)])
disp(['The MAE of the testing set data is:', num2str(mae2)])