% Extract the class labels from each table
YTrain      = tblTrain.Class;
YValidation = tblValidation.Class;
YTest       = tblTest.Class;

% Extract deep features for all three sets from the 'fc_3' layer of the trained network
layer = 'fc_3';
featuresTrain      = activations(net,tblTrain,layer,'OutputAs','rows','ExecutionEnvironment','cpu');
featuresValidation = activations(net,tblValidation,layer,'OutputAs','rows','ExecutionEnvironment','cpu');
featuresTest       = activations(net,tblTest,layer,'OutputAs','rows','ExecutionEnvironment','cpu');

% Merge the training and validation features and convert the labels to numeric class indices
trainData = [featuresTrain; featuresValidation];
testData  = featuresTest;
YTrain = grp2idx([YTrain; YValidation]);   % training labels
YTest  = grp2idx(YTest);                   % test labels
trainlabel = YTrain;
testlabel  = YTest;
attributesData = trainData;                % feature matrix
% Optional per-column normalization of the features:
% for ii = 1:size(attributesData,2)
%     attributesData(:,ii) = normalize(attributesData(:,ii));
% end

X = [trainData; testData];   % features
Y = [YTrain; YTest];         % numeric class labels

% Re-split the pooled data into 80% training and 20% test sets
[trainIdx, ~, testIdx] = dividerand(length(Y), 0.8, 0, 0.2);
X_train = X(trainIdx, :);
Y_train = Y(trainIdx);
X_test  = X(testIdx, :);
Y_test  = Y(testIdx);

% Define the candidate classifiers as training function handles
linear       = templateSVM('KernelFunction','linear');
linear_svm   = @(x,y) fitcecoc(x, y, 'Learners', linear);
gaussian     = templateSVM('KernelFunction','gaussian');
gaussian_svm = @(x,y) fitcecoc(x, y, 'Learners', gaussian);
knn1         = @(x,y) fitcknn(x, y, 'NumNeighbors', 1);
knn3         = @(x,y) fitcknn(x, y, 'NumNeighbors', 3);
knn5         = @(x,y) fitcknn(x, y, 'NumNeighbors', 5);
tree         = @(x,y) fitctree(x, y);
Bayes        = @(x,y) fitcnb(x, y);
discriminant = @(x,y) fitcdiscr(x, y);
%learners = {linear_svm, gaussian_svm, knn1, knn3, knn5, tree, Bayes, discriminant};
learners = {linear_svm, gaussian_svm, knn1, knn3, knn5, tree, Bayes};

%% Single models
% Train each classifier on its own, predict on the test set, and report the
% confusion matrix and test accuracy.
singleLearners = {tree, knn5, knn3, knn1, gaussian_svm, linear_svm, Bayes, discriminant};
learnerNames   = {'Decision tree', 'kNN (k = 5)', 'kNN (k = 3)', 'kNN (k = 1)', ...
                  'Gaussian SVM', 'Linear SVM', 'Naive Bayes', 'Discriminant'};
cMatrices = cell(size(singleLearners));
results   = cell(size(singleLearners));
for k = 1:numel(singleLearners)
    mdl    = singleLearners{k}(X_train, Y_train);        % train model
    y_pred = predict(mdl, X_test);                       % predict
    [cMatrices{k}, results{k}] = confusion.getMatrix(Y_test, y_pred);
    fprintf("---------------------\n");
    fprintf("%s: %.2f%%\n", learnerNames{k}, 100 * mean(y_pred == Y_test));
    fprintf("---------------------\n");
end

%% Ensemble of the two best single models
learners = {tree, knn5};
ens   = classification_ensemble(learners);   % initialize ensemble (user-defined class, see note below)
ens   = ens.fit(X_train, Y_train);           % train ensemble
y_ens = ens.predict(X_test);                 % predict
[c_matrixp, Result2] = confusion.getMatrix(Y_test, y_ens);
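% NOTE (assumption): classification_ensemble and confusion.getMatrix are not built-in
% MATLAB functions. confusion.getMatrix appears to come from a File Exchange confusion
% matrix utility, and classification_ensemble from a user-defined class; both must be
% on the MATLAB path for this script to run.
%
% For consistency with the single models above, the ensemble's test accuracy can be
% reported in the same format (a minimal sketch using the variables defined above):
fprintf("---------------------\n");
fprintf("Ensemble (tree + kNN, k = 5): %.2f%%\n", 100 * mean(y_ens == Y_test));
fprintf("---------------------\n");

% ---------------------------------------------------------------------------
% classification_ensemble.m  (separate file -- an assumed implementation)
% ---------------------------------------------------------------------------
% The original class definition is not shown here. A minimal majority-vote
% ensemble exposing the constructor/fit/predict interface used above could look
% like the sketch below; uncomment it and save it in its own file named
% classification_ensemble.m (MATLAB requires classdef files to be separate).
%
% classdef classification_ensemble
%     properties
%         learners   % cell array of training function handles, e.g. {@(x,y) fitctree(x,y)}
%         models     % cell array of trained model objects, filled in by fit()
%     end
%     methods
%         function obj = classification_ensemble(learners)
%             obj.learners = learners;
%         end
%         function obj = fit(obj, X, Y)
%             % Train every base learner on the same training data
%             obj.models = cellfun(@(f) f(X, Y), obj.learners, 'UniformOutput', false);
%         end
%         function yhat = predict(obj, X)
%             % Stack each model's predictions column-wise and majority-vote row-wise
%             preds = cellfun(@(m) predict(m, X), obj.models, 'UniformOutput', false);
%             yhat  = mode(cell2mat(preds), 2);
%         end
%     end
% end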