%% Load training and monitoring data
% NOTE(review): absolute Desktop paths — consider parameterizing before reuse.
xunlian = importdata('C:\Users\HP\Desktop\training data.mat');  % training data (table)
jiance  = importdata('C:\Users\HP\Desktop\monitor data.mat');   % monitoring data (table)

%% Sliding-window robust filtering of the monitored variables
c = 2; d = 13;                          % columns holding the 12 process variables
data  = jiance(50:end, c:d);
data  = table2array(data);
data0 = jiance(:, 1);                   % first column: timestamps

windowSize = 430;                       % outer analysis window (samples)
stride     = 10;
numWindows = floor((size(data, 1) - windowSize) / stride) + 1;

for i1 = 1:numWindows
    startIndex = (i1 - 1) * stride + 1;
    endIndex   = startIndex + windowSize - 1;
    windowData = data(startIndex:endIndex, :);
    x = data0(startIndex:endIndex, :);
    n = table2array(x);
    x = datetime(n);                    % timestamps of this window (currently unused)

    window_size = 15;                   % inner outlier-filter window length
    step = 1;
    filtered_data = zeros(size(windowData));
    for col = 1:size(windowData, 2)
        column_data = windowData(:, col);
        for i = 1:step:size(windowData, 1) - window_size + 1
            window = column_data(i : i + window_size - 1);
            q1 = quantile(window, 0.25);
            q3 = quantile(window, 0.75);
            middle_data = window(window >= q1 & window <= q3);
            mean_value  = mean(middle_data);            % IQR-trimmed mean
            % BUG FIX: the original computed an elementwise "threshold"
            %   mean_value + (window - mean_value) * 1.483
            % and flagged window < threshold | window > threshold, which is
            % true for every sample not exactly equal to the trimmed mean,
            % so each window collapsed to a constant. Flag samples outside a
            % symmetric robust band instead; 1.483 is the usual robust
            % spread-to-sigma scale factor (cf. MAD * 1.4826).
            threshold = 1.483 * (q3 - q1);
            outliers  = abs(window - mean_value) > threshold;
            window(outliers) = mean_value;
            filtered_data(i : i + window_size - 1, col) = window;
        end
    end

    %% LSTM forecasting: train/test split and standardization
    qq = 1; ww = 400; ee = 400; rr = 430; tt = 0;
    data1 = filtered_data(:, 1:12)';        % variables-by-time
    dataTrain1 = data1(:, qq:rr);
    dataTrain  = data1(:, qq:ee);
    dataTest   = data1(:, ee:rr);           % NOTE(review): overlaps train at column ee

    mu  = mean(dataTrain, 2);
    sig = std(dataTrain, 0, 2);
    dataTrainStandardized = (dataTrain - mu) ./ sig;
    XTrain = dataTrainStandardized(:, 1:end-1);   % predict next step from current
    YTrain = dataTrainStandardized(:, 2:end);

    %% LSTM network training
    numFeatures  = numel(XTrain(:, 1));
    numResponses = numel(XTrain(:, 1));
    numHiddenUnits = 200;
    layers = [ ...
        sequenceInputLayer(numFeatures)
        lstmLayer(numHiddenUnits)
        fullyConnectedLayer(numResponses)
        regressionLayer];
    options = trainingOptions('adam', ...
        'MaxEpochs', 500, ...
        'GradientThreshold', 2, ...
        'InitialLearnRate', 0.01, ...
        'LearnRateSchedule', 'piecewise', ...
        'LearnRateDropPeriod', 259, ...
        'LearnRateDropFactor', 0.1, ...
        'Verbose', 0, ...
        'Plots', 'training-progress');
    net = trainNetwork(XTrain, YTrain, layers, options);

    %% Closed-loop forecasting (feed predictions back as inputs)
    net = predictAndUpdateState(net, XTrain);
    [net, YPred] = predictAndUpdateState(net, YTrain(:, end));
    for i = 2:(rr - ee + tt + 1)
        [net, YPred(:, i)] = predictAndUpdateState(net, YPred(:, i-1), ...
            'ExecutionEnvironment', 'cpu');
    end

    %% Un-standardize predictions and evaluate
    YPred = sig .* YPred + mu;
    rmse  = sqrt(mean((YPred(:, 1:rr-ee+1) - dataTest).^2, 2));  % per-variable RMSE
    rmse1 = sqrt(mean((YPred(:, 1:rr-ee+1) - dataTest).^2));     % per-step RMSE
    % BUG FIX: dataTrain1(rr:rr+tt) indexed the 12-by-430 matrix linearly
    % (element 430 = row 10, column 36), not column rr; use column indexing.
    rmse2 = sqrt(mean((YPred(:, rr-ee:rr-ee+tt) - dataTrain1(:, rr:rr+tt)).^2, 2));

    figure
    subplot(2, 1, 1)
    plot(dataTrain1(:, 1:end)', '-', 'DisplayName', 'Observed')
    hold on
    idx = ee:rr;                        % x-axis for the forecast segment
    plot(idx, YPred(:, 1:(rr-ee+1)), '.-', 'DisplayName', 'Forecast')
    hold off
    xlabel("Time")
    ylabel("Output variables")
    title("Forecast")
    legend('show');
    subplot(2, 1, 2)
    plot(data1')
    xlabel("Time")
    ylabel("Output variables")
    title("Dataset")

    figure
    idx = ee:(rr + tt);
    plot(idx, YPred(:, 1:(rr-ee+tt+1)), '.-')   % show predicted values

    %% PCA fault detection: build reference and test sets
    aa = xunlian(:, c:d);
    x  = table2array(aa);               % fault-free reference data
    windowSize1 = 100;
    stride = 1;
    ii1 = jiance(400:700, c:d);
    ii1 = table2array(ii1);
    numWindows1 = floor((size(ii1, 1) - windowSize1) / stride) + 1;
    for w = 1:numWindows1
        startIndex1 = (w - 1) * stride + 1;
        endIndex1   = startIndex1 + windowSize1 - 1;
        if endIndex1 > size(ii1, 1)
            continue;
        end
        windows{w} = ii1(startIndex1:endIndex1, :); %#ok<AGROW>
        % BUG FIX: the original reassigned ii1 = windows{w}, shrinking the
        % 301-row source to 100 rows so every window after the first hit the
        % `continue` above and was skipped. Keep the source intact and work
        % on a separate variable. NOTE(review): all windows now run, so this
        % loop produces one figure per window.
        currentWindow = windows{w};
        ii2 = YPred(:, 1:(rr - ee))';   % append forecast to the test window
        xx = vertcat(currentWindow, ii2);
        xx_train = x;
        xx_test  = xx;
        Xtrain = xx_train;
        Xtest  = xx_test;
        X_mean = mean(Xtrain);
        X_std  = std(Xtrain);
        [X_row, X_col] = size(Xtrain);
        Xtrain = (Xtrain - repmat(X_mean, X_row, 1)) ./ repmat(X_std, X_row, 1);

        %% PCA dimensionality reduction
        SXtrain = cov(Xtrain);
        [T, lm] = eig(SXtrain);
        D = flipud(diag(lm));           % eigenvalues in descending order
        num = 1;
        while sum(D(1:num)) / sum(D) < 0.85   % retain 85% of the variance
            num = num + 1;
        end
        P  = T(:, X_col-num+1:X_col);   % principal loadings (ascending order)
        P_ = fliplr(P);                 % reordered to descending

        %% Control limits for T2 and Q at the 99% level
        T2UCL1 = num*(X_row-1)*(X_row+1)*finv(0.99, num, X_row-num) ...
                 / (X_row*(X_row-num));
        % Jackson-Mudholkar Q (SPE) limit from the discarded eigenvalues.
        for i = 1:3
            th(i) = sum((D(num+1:X_col)).^i);
        end
        h0 = 1 - 2*th(1)*th(3) / (3*th(2)^2);
        ca = norminv(0.99, 0, 1);
        QU = th(1)*(h0*ca*sqrt(2*th(2))/th(1) + 1 + th(2)*h0*(h0-1)/th(1)^2)^(1/h0);

        %% Model testing: T2 and Q statistics per test sample
        n = size(Xtest, 1);
        Xtest = (Xtest - repmat(X_mean, n, 1)) ./ repmat(X_std, n, 1);
        [r, y] = size(P * P');
        I = eye(r, y);
        T2 = zeros(n, 1);
        Q  = zeros(n, 1);
        lm_ = fliplr(flipud(lm));       % eigenvalue matrix reordered to match P_
        % T2 = x * P_ * inv(S) * P_' * x'
        for i = 1:n
            T2(i) = Xtest(i,:) * P_ * inv(lm_(1:num, 1:num)) * P_' * Xtest(i,:)'; %#ok<MINV>
            Q(i)  = Xtest(i,:) * (I - P*P') * Xtest(i,:)';
        end

        %% Plot T2 and SPE charts against their control limits
        % (original plotted 1:i and i:n with the stale loop index i == n;
        % a single 1:n plot is visually identical)
        figure('Name', 'PCA');
        subplot(2, 1, 1);
        plot(1:n, T2, 'k');
        title('Graph of changes in statistics');
        xlabel('Number of samples');
        ylabel('T2');
        hold on;
        line([0, n], [T2UCL1, T2UCL1], 'LineStyle', '--', 'Color', 'r');
        subplot(2, 1, 2);
        plot(1:n, Q, 'k');
        title('Graph of changes in statistics');
        xlabel('Number of samples');
        ylabel('SPE');
        hold on;
        line([0, n], [QU, QU], 'LineStyle', '--', 'Color', 'r');
    end
end