{ "nbformat": 4, "nbformat_minor": 0, "metadata": { "colab": { "name": "DNN_on_pima.ipynb", "provenance": [] }, "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "cells": [ { "cell_type": "code", "metadata": { "colab": { "resources": { "http://localhost:8080/nbextensions/google.colab/files.js": { "data": "Ly8gQ29weXJpZ2h0IDIwMTcgR29vZ2xlIExMQwovLwovLyBMaWNlbnNlZCB1bmRlciB0aGUgQXBhY2hlIExpY2Vuc2UsIFZlcnNpb24gMi4wICh0aGUgIkxpY2Vuc2UiKTsKLy8geW91IG1heSBub3QgdXNlIHRoaXMgZmlsZSBleGNlcHQgaW4gY29tcGxpYW5jZSB3aXRoIHRoZSBMaWNlbnNlLgovLyBZb3UgbWF5IG9idGFpbiBhIGNvcHkgb2YgdGhlIExpY2Vuc2UgYXQKLy8KLy8gICAgICBodHRwOi8vd3d3LmFwYWNoZS5vcmcvbGljZW5zZXMvTElDRU5TRS0yLjAKLy8KLy8gVW5sZXNzIHJlcXVpcmVkIGJ5IGFwcGxpY2FibGUgbGF3IG9yIGFncmVlZCB0byBpbiB3cml0aW5nLCBzb2Z0d2FyZQovLyBkaXN0cmlidXRlZCB1bmRlciB0aGUgTGljZW5zZSBpcyBkaXN0cmlidXRlZCBvbiBhbiAiQVMgSVMiIEJBU0lTLAovLyBXSVRIT1VUIFdBUlJBTlRJRVMgT1IgQ09ORElUSU9OUyBPRiBBTlkgS0lORCwgZWl0aGVyIGV4cHJlc3Mgb3IgaW1wbGllZC4KLy8gU2VlIHRoZSBMaWNlbnNlIGZvciB0aGUgc3BlY2lmaWMgbGFuZ3VhZ2UgZ292ZXJuaW5nIHBlcm1pc3Npb25zIGFuZAovLyBsaW1pdGF0aW9ucyB1bmRlciB0aGUgTGljZW5zZS4KCi8qKgogKiBAZmlsZW92ZXJ2aWV3IEhlbHBlcnMgZm9yIGdvb2dsZS5jb2xhYiBQeXRob24gbW9kdWxlLgogKi8KKGZ1bmN0aW9uKHNjb3BlKSB7CmZ1bmN0aW9uIHNwYW4odGV4dCwgc3R5bGVBdHRyaWJ1dGVzID0ge30pIHsKICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnc3BhbicpOwogIGVsZW1lbnQudGV4dENvbnRlbnQgPSB0ZXh0OwogIGZvciAoY29uc3Qga2V5IG9mIE9iamVjdC5rZXlzKHN0eWxlQXR0cmlidXRlcykpIHsKICAgIGVsZW1lbnQuc3R5bGVba2V5XSA9IHN0eWxlQXR0cmlidXRlc1trZXldOwogIH0KICByZXR1cm4gZWxlbWVudDsKfQoKLy8gTWF4IG51bWJlciBvZiBieXRlcyB3aGljaCB3aWxsIGJlIHVwbG9hZGVkIGF0IGEgdGltZS4KY29uc3QgTUFYX1BBWUxPQURfU0laRSA9IDEwMCAqIDEwMjQ7CgpmdW5jdGlvbiBfdXBsb2FkRmlsZXMoaW5wdXRJZCwgb3V0cHV0SWQpIHsKICBjb25zdCBzdGVwcyA9IHVwbG9hZEZpbGVzU3RlcChpbnB1dElkLCBvdXRwdXRJZCk7CiAgY29uc3Qgb3V0cHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKG91dHB1dElkKTsKICAvLyBDYWNoZSBzdGVwcyBvbiB0aGUgb3V0cHV0RWxlbWVudCB0byBtYWtlIGl0IGF2YWlsYWJsZ
SBmb3IgdGhlIG5leHQgY2FsbAogIC8vIHRvIHVwbG9hZEZpbGVzQ29udGludWUgZnJvbSBQeXRob24uCiAgb3V0cHV0RWxlbWVudC5zdGVwcyA9IHN0ZXBzOwoKICByZXR1cm4gX3VwbG9hZEZpbGVzQ29udGludWUob3V0cHV0SWQpOwp9CgovLyBUaGlzIGlzIHJvdWdobHkgYW4gYXN5bmMgZ2VuZXJhdG9yIChub3Qgc3VwcG9ydGVkIGluIHRoZSBicm93c2VyIHlldCksCi8vIHdoZXJlIHRoZXJlIGFyZSBtdWx0aXBsZSBhc3luY2hyb25vdXMgc3RlcHMgYW5kIHRoZSBQeXRob24gc2lkZSBpcyBnb2luZwovLyB0byBwb2xsIGZvciBjb21wbGV0aW9uIG9mIGVhY2ggc3RlcC4KLy8gVGhpcyB1c2VzIGEgUHJvbWlzZSB0byBibG9jayB0aGUgcHl0aG9uIHNpZGUgb24gY29tcGxldGlvbiBvZiBlYWNoIHN0ZXAsCi8vIHRoZW4gcGFzc2VzIHRoZSByZXN1bHQgb2YgdGhlIHByZXZpb3VzIHN0ZXAgYXMgdGhlIGlucHV0IHRvIHRoZSBuZXh0IHN0ZXAuCmZ1bmN0aW9uIF91cGxvYWRGaWxlc0NvbnRpbnVlKG91dHB1dElkKSB7CiAgY29uc3Qgb3V0cHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKG91dHB1dElkKTsKICBjb25zdCBzdGVwcyA9IG91dHB1dEVsZW1lbnQuc3RlcHM7CgogIGNvbnN0IG5leHQgPSBzdGVwcy5uZXh0KG91dHB1dEVsZW1lbnQubGFzdFByb21pc2VWYWx1ZSk7CiAgcmV0dXJuIFByb21pc2UucmVzb2x2ZShuZXh0LnZhbHVlLnByb21pc2UpLnRoZW4oKHZhbHVlKSA9PiB7CiAgICAvLyBDYWNoZSB0aGUgbGFzdCBwcm9taXNlIHZhbHVlIHRvIG1ha2UgaXQgYXZhaWxhYmxlIHRvIHRoZSBuZXh0CiAgICAvLyBzdGVwIG9mIHRoZSBnZW5lcmF0b3IuCiAgICBvdXRwdXRFbGVtZW50Lmxhc3RQcm9taXNlVmFsdWUgPSB2YWx1ZTsKICAgIHJldHVybiBuZXh0LnZhbHVlLnJlc3BvbnNlOwogIH0pOwp9CgovKioKICogR2VuZXJhdG9yIGZ1bmN0aW9uIHdoaWNoIGlzIGNhbGxlZCBiZXR3ZWVuIGVhY2ggYXN5bmMgc3RlcCBvZiB0aGUgdXBsb2FkCiAqIHByb2Nlc3MuCiAqIEBwYXJhbSB7c3RyaW5nfSBpbnB1dElkIEVsZW1lbnQgSUQgb2YgdGhlIGlucHV0IGZpbGUgcGlja2VyIGVsZW1lbnQuCiAqIEBwYXJhbSB7c3RyaW5nfSBvdXRwdXRJZCBFbGVtZW50IElEIG9mIHRoZSBvdXRwdXQgZGlzcGxheS4KICogQHJldHVybiB7IUl0ZXJhYmxlPCFPYmplY3Q+fSBJdGVyYWJsZSBvZiBuZXh0IHN0ZXBzLgogKi8KZnVuY3Rpb24qIHVwbG9hZEZpbGVzU3RlcChpbnB1dElkLCBvdXRwdXRJZCkgewogIGNvbnN0IGlucHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKGlucHV0SWQpOwogIGlucHV0RWxlbWVudC5kaXNhYmxlZCA9IGZhbHNlOwoKICBjb25zdCBvdXRwdXRFbGVtZW50ID0gZG9jdW1lbnQuZ2V0RWxlbWVudEJ5SWQob3V0cHV0SWQpOwogIG91dHB1dEVsZW1lbnQuaW5uZXJIVE1MID0gJyc7CgogIGNvbnN0IHBpY2tlZFByb21pc2UgPSBuZXcgUHJvbWlzZSgocmVzb2x2ZSkgPT4gewogICAgaW5wd
XRFbGVtZW50LmFkZEV2ZW50TGlzdGVuZXIoJ2NoYW5nZScsIChlKSA9PiB7CiAgICAgIHJlc29sdmUoZS50YXJnZXQuZmlsZXMpOwogICAgfSk7CiAgfSk7CgogIGNvbnN0IGNhbmNlbCA9IGRvY3VtZW50LmNyZWF0ZUVsZW1lbnQoJ2J1dHRvbicpOwogIGlucHV0RWxlbWVudC5wYXJlbnRFbGVtZW50LmFwcGVuZENoaWxkKGNhbmNlbCk7CiAgY2FuY2VsLnRleHRDb250ZW50ID0gJ0NhbmNlbCB1cGxvYWQnOwogIGNvbnN0IGNhbmNlbFByb21pc2UgPSBuZXcgUHJvbWlzZSgocmVzb2x2ZSkgPT4gewogICAgY2FuY2VsLm9uY2xpY2sgPSAoKSA9PiB7CiAgICAgIHJlc29sdmUobnVsbCk7CiAgICB9OwogIH0pOwoKICAvLyBXYWl0IGZvciB0aGUgdXNlciB0byBwaWNrIHRoZSBmaWxlcy4KICBjb25zdCBmaWxlcyA9IHlpZWxkIHsKICAgIHByb21pc2U6IFByb21pc2UucmFjZShbcGlja2VkUHJvbWlzZSwgY2FuY2VsUHJvbWlzZV0pLAogICAgcmVzcG9uc2U6IHsKICAgICAgYWN0aW9uOiAnc3RhcnRpbmcnLAogICAgfQogIH07CgogIGNhbmNlbC5yZW1vdmUoKTsKCiAgLy8gRGlzYWJsZSB0aGUgaW5wdXQgZWxlbWVudCBzaW5jZSBmdXJ0aGVyIHBpY2tzIGFyZSBub3QgYWxsb3dlZC4KICBpbnB1dEVsZW1lbnQuZGlzYWJsZWQgPSB0cnVlOwoKICBpZiAoIWZpbGVzKSB7CiAgICByZXR1cm4gewogICAgICByZXNwb25zZTogewogICAgICAgIGFjdGlvbjogJ2NvbXBsZXRlJywKICAgICAgfQogICAgfTsKICB9CgogIGZvciAoY29uc3QgZmlsZSBvZiBmaWxlcykgewogICAgY29uc3QgbGkgPSBkb2N1bWVudC5jcmVhdGVFbGVtZW50KCdsaScpOwogICAgbGkuYXBwZW5kKHNwYW4oZmlsZS5uYW1lLCB7Zm9udFdlaWdodDogJ2JvbGQnfSkpOwogICAgbGkuYXBwZW5kKHNwYW4oCiAgICAgICAgYCgke2ZpbGUudHlwZSB8fCAnbi9hJ30pIC0gJHtmaWxlLnNpemV9IGJ5dGVzLCBgICsKICAgICAgICBgbGFzdCBtb2RpZmllZDogJHsKICAgICAgICAgICAgZmlsZS5sYXN0TW9kaWZpZWREYXRlID8gZmlsZS5sYXN0TW9kaWZpZWREYXRlLnRvTG9jYWxlRGF0ZVN0cmluZygpIDoKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgJ24vYSd9IC0gYCkpOwogICAgY29uc3QgcGVyY2VudCA9IHNwYW4oJzAlIGRvbmUnKTsKICAgIGxpLmFwcGVuZENoaWxkKHBlcmNlbnQpOwoKICAgIG91dHB1dEVsZW1lbnQuYXBwZW5kQ2hpbGQobGkpOwoKICAgIGNvbnN0IGZpbGVEYXRhUHJvbWlzZSA9IG5ldyBQcm9taXNlKChyZXNvbHZlKSA9PiB7CiAgICAgIGNvbnN0IHJlYWRlciA9IG5ldyBGaWxlUmVhZGVyKCk7CiAgICAgIHJlYWRlci5vbmxvYWQgPSAoZSkgPT4gewogICAgICAgIHJlc29sdmUoZS50YXJnZXQucmVzdWx0KTsKICAgICAgfTsKICAgICAgcmVhZGVyLnJlYWRBc0FycmF5QnVmZmVyKGZpbGUpOwogICAgfSk7CiAgICAvLyBXYWl0IGZvciB0aGUgZGF0YSB0byBiZSByZWFkeS4KICAgIGxldCBmaWxlRGF0YSA9IHlpZWxkIHsKICAgICAgcHJvb
WlzZTogZmlsZURhdGFQcm9taXNlLAogICAgICByZXNwb25zZTogewogICAgICAgIGFjdGlvbjogJ2NvbnRpbnVlJywKICAgICAgfQogICAgfTsKCiAgICAvLyBVc2UgYSBjaHVua2VkIHNlbmRpbmcgdG8gYXZvaWQgbWVzc2FnZSBzaXplIGxpbWl0cy4gU2VlIGIvNjIxMTU2NjAuCiAgICBsZXQgcG9zaXRpb24gPSAwOwogICAgZG8gewogICAgICBjb25zdCBsZW5ndGggPSBNYXRoLm1pbihmaWxlRGF0YS5ieXRlTGVuZ3RoIC0gcG9zaXRpb24sIE1BWF9QQVlMT0FEX1NJWkUpOwogICAgICBjb25zdCBjaHVuayA9IG5ldyBVaW50OEFycmF5KGZpbGVEYXRhLCBwb3NpdGlvbiwgbGVuZ3RoKTsKICAgICAgcG9zaXRpb24gKz0gbGVuZ3RoOwoKICAgICAgY29uc3QgYmFzZTY0ID0gYnRvYShTdHJpbmcuZnJvbUNoYXJDb2RlLmFwcGx5KG51bGwsIGNodW5rKSk7CiAgICAgIHlpZWxkIHsKICAgICAgICByZXNwb25zZTogewogICAgICAgICAgYWN0aW9uOiAnYXBwZW5kJywKICAgICAgICAgIGZpbGU6IGZpbGUubmFtZSwKICAgICAgICAgIGRhdGE6IGJhc2U2NCwKICAgICAgICB9LAogICAgICB9OwoKICAgICAgbGV0IHBlcmNlbnREb25lID0gZmlsZURhdGEuYnl0ZUxlbmd0aCA9PT0gMCA/CiAgICAgICAgICAxMDAgOgogICAgICAgICAgTWF0aC5yb3VuZCgocG9zaXRpb24gLyBmaWxlRGF0YS5ieXRlTGVuZ3RoKSAqIDEwMCk7CiAgICAgIHBlcmNlbnQudGV4dENvbnRlbnQgPSBgJHtwZXJjZW50RG9uZX0lIGRvbmVgOwoKICAgIH0gd2hpbGUgKHBvc2l0aW9uIDwgZmlsZURhdGEuYnl0ZUxlbmd0aCk7CiAgfQoKICAvLyBBbGwgZG9uZS4KICB5aWVsZCB7CiAgICByZXNwb25zZTogewogICAgICBhY3Rpb246ICdjb21wbGV0ZScsCiAgICB9CiAgfTsKfQoKc2NvcGUuZ29vZ2xlID0gc2NvcGUuZ29vZ2xlIHx8IHt9OwpzY29wZS5nb29nbGUuY29sYWIgPSBzY29wZS5nb29nbGUuY29sYWIgfHwge307CnNjb3BlLmdvb2dsZS5jb2xhYi5fZmlsZXMgPSB7CiAgX3VwbG9hZEZpbGVzLAogIF91cGxvYWRGaWxlc0NvbnRpbnVlLAp9Owp9KShzZWxmKTsK", "ok": true, "headers": [ [ "content-type", "application/javascript" ] ], "status": 200, "status_text": "" } }, "base_uri": "https://localhost:8080/", "height": 474 }, "id": "Qw0pwVfzQ4wg", "outputId": "aa2f88be-bbba-4d27-9cdb-05318b801eb0" }, "source": [ "from google.colab import files\n", "import pandas as pd\n", "uploaded = files.upload()\n", "import io \n", "dataset = pd.read_csv(io.BytesIO(uploaded['pimadiabetes.csv']))\n", "dataset" ], "execution_count": 6, "outputs": [ { "output_type": "display_data", "data": { "text/html": [ "\n", " \n", " \n", " Upload widget is only available when the cell 
has been executed in the\n", " current browser session. Please rerun this cell to enable.\n", " \n", " " ], "text/plain": [ "" ] }, "metadata": {} }, { "output_type": "stream", "text": [ "Saving pimadiabetes.csv to pimadiabetes (1).csv\n" ], "name": "stdout" }, { "output_type": "execute_result", "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
PregnanciesGlucoseBloodPressureSkinThicknessInsulinBMIDiabetesPedigreeFunctionAgeOutcome
061487235033.60.627501
11856629026.60.351310
28183640023.30.672321
318966239428.10.167210
40137403516843.12.288331
..............................
76310101764818032.90.171630
76421227027036.80.340270
7655121722311226.20.245300
7661126600030.10.349471
7671937031030.40.315230
\n", "

768 rows × 9 columns

\n", "
" ], "text/plain": [ " Pregnancies Glucose ... Age Outcome\n", "0 6 148 ... 50 1\n", "1 1 85 ... 31 0\n", "2 8 183 ... 32 1\n", "3 1 89 ... 21 0\n", "4 0 137 ... 33 1\n", ".. ... ... ... ... ...\n", "763 10 101 ... 63 0\n", "764 2 122 ... 27 0\n", "765 5 121 ... 30 0\n", "766 1 126 ... 47 1\n", "767 1 93 ... 23 0\n", "\n", "[768 rows x 9 columns]" ] }, "metadata": {}, "execution_count": 6 } ] }, { "cell_type": "code", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "8uHo_DrxRr5X", "outputId": "61a673ab-fb84-413f-c453-ead94055a648" }, "source": [ "#import all libraries\n", "\n", "import tensorflow as tf\n", "from numpy import loadtxt\n", "from keras.models import Sequential\n", "from keras.layers import Dense\n", "from sklearn.model_selection import train_test_split\n", "from sklearn import preprocessing\n", "\n", "X = dataset.iloc[:,0:8].values\n", "Y = dataset.iloc[:,8].values\n", "X = preprocessing.scale(X)\n", "x_train,x_test,y_train,y_test=train_test_split(X,Y,test_size=0.2, random_state=2)\n", "\n", "#define the keras models\n", "model = Sequential()\n", "model.add(Dense(16, input_dim=8, activation='relu'))\n", "model.add(Dense(14, activation='relu'))\n", "model.add(Dense(12,activation ='relu'))\n", "\n", "model.add(Dense(10, input_dim=6, activation='relu'))\n", "model.add(Dense(8, activation='relu'))\n", "model.add(Dense(6,activation ='relu'))\n", "\n", "model.add(Dense(6, input_dim=4, activation='relu'))\n", "model.add(Dense(4, activation='relu'))\n", "model.add(Dense(1,activation ='sigmoid'))\n", "model.summary()" ], "execution_count": 9, "outputs": [ { "output_type": "stream", "text": [ "Model: \"sequential_3\"\n", "_________________________________________________________________\n", "Layer (type) Output Shape Param # \n", "=================================================================\n", "dense_27 (Dense) (None, 16) 144 \n", "_________________________________________________________________\n", "dense_28 (Dense) (None, 14) 238 
\n", "_________________________________________________________________\n", "dense_29 (Dense) (None, 12) 180 \n", "_________________________________________________________________\n", "dense_30 (Dense) (None, 10) 130 \n", "_________________________________________________________________\n", "dense_31 (Dense) (None, 8) 88 \n", "_________________________________________________________________\n", "dense_32 (Dense) (None, 6) 54 \n", "_________________________________________________________________\n", "dense_33 (Dense) (None, 6) 42 \n", "_________________________________________________________________\n", "dense_34 (Dense) (None, 4) 28 \n", "_________________________________________________________________\n", "dense_35 (Dense) (None, 1) 5 \n", "=================================================================\n", "Total params: 909\n", "Trainable params: 909\n", "Non-trainable params: 0\n", "_________________________________________________________________\n" ], "name": "stdout" } ] }, { "cell_type": "code", "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "vJX-yDwdSRus", "outputId": "f1730115-eef5-43b2-903a-c0bee8b49596" }, "source": [ "#Compile the model\n", "import numpy as np\n", "model.compile(loss=\"binary_crossentropy\", optimizer=\"adam\", metrics=['accuracy',tf.keras.metrics.Precision(name='precision'),\n", " tf.keras.metrics.Recall(name='recall')])\n", "\n", "#fit the keras model on the dataset\n", "history=model.fit(X,Y,epochs=500, batch_size=50 )\n", "\n", "#evaluate the keras model\n", "_,accuracy,P,R = model.evaluate(x_train,y_train)\n", "print('Train Accuracy: %.2f' % (accuracy*100))\n", "print('Train Precision: %.2f' % (P*100))\n", "print('Train Recall: %.2f' % (R*100))\n", "#predictions = model.predict_classes(x_train)\n", "\n", "_,accuracy,P,R = model.evaluate(x_test,y_test)\n", "print('Test Accuracy: %.2f' % (accuracy*100))\n", "print('Test Precision: %.2f' % (accuracy*100))\n", "print('Test Recall: %.2f' % 
(R*100))\n", "#predictions = model.predict_classes(x_test)\n", "#p1=model.predict_classes(x_test)\n", "#y_pred= model.predict_classes(X_test)\n", "# summarize the first 15 cases\n" ], "execution_count": 10, "outputs": [ { "output_type": "stream", "text": [ "Epoch 1/500\n", "16/16 [==============================] - 1s 2ms/step - loss: 0.6889 - accuracy: 0.6471 - precision: 0.4940 - recall: 0.4590\n", "Epoch 2/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6784 - accuracy: 0.7070 - precision: 0.7172 - recall: 0.2649\n", "Epoch 3/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6643 - accuracy: 0.7188 - precision: 0.7281 - recall: 0.3097\n", "Epoch 4/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6446 - accuracy: 0.7344 - precision: 0.6905 - recall: 0.4328\n", "Epoch 5/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.6186 - accuracy: 0.7461 - precision: 0.6952 - recall: 0.4851\n", "Epoch 6/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5828 - accuracy: 0.7552 - precision: 0.7247 - recall: 0.4813\n", "Epoch 7/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5415 - accuracy: 0.7565 - precision: 0.7189 - recall: 0.4963\n", "Epoch 8/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.5068 - accuracy: 0.7656 - precision: 0.7292 - recall: 0.5224\n", "Epoch 9/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4843 - accuracy: 0.7643 - precision: 0.7208 - recall: 0.5299\n", "Epoch 10/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4739 - accuracy: 0.7799 - precision: 0.7463 - recall: 0.5597\n", "Epoch 11/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4634 - accuracy: 0.7786 - precision: 0.7356 - recall: 0.5709\n", "Epoch 12/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.4584 - accuracy: 0.7826 - precision: 0.7371 - 
recall: 0.5858\n", "Epoch 13/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4505 - accuracy: 0.7865 - precision: 0.7574 - recall: 0.5709\n", "Epoch 14/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4487 - accuracy: 0.7878 - precision: 0.7354 - recall: 0.6119\n", "Epoch 15/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4448 - accuracy: 0.7917 - precision: 0.7571 - recall: 0.5933\n", "Epoch 16/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4397 - accuracy: 0.7956 - precision: 0.7511 - recall: 0.6194\n", "Epoch 17/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4374 - accuracy: 0.7956 - precision: 0.7403 - recall: 0.6381\n", "Epoch 18/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4362 - accuracy: 0.8008 - precision: 0.7533 - recall: 0.6381\n", "Epoch 19/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4377 - accuracy: 0.7917 - precision: 0.7411 - recall: 0.6194\n", "Epoch 20/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4319 - accuracy: 0.7943 - precision: 0.7570 - recall: 0.6045\n", "Epoch 21/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4283 - accuracy: 0.7995 - precision: 0.7395 - recall: 0.6567\n", "Epoch 22/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4277 - accuracy: 0.7969 - precision: 0.7593 - recall: 0.6119\n", "Epoch 23/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4285 - accuracy: 0.7969 - precision: 0.7222 - recall: 0.6791\n", "Epoch 24/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4234 - accuracy: 0.7969 - precision: 0.7478 - recall: 0.6306\n", "Epoch 25/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4230 - accuracy: 0.8021 - precision: 0.7437 - recall: 0.6604\n", "Epoch 26/500\n", "16/16 [==============================] - 0s 
2ms/step - loss: 0.4200 - accuracy: 0.7982 - precision: 0.7467 - recall: 0.6381\n", "Epoch 27/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4166 - accuracy: 0.8099 - precision: 0.7521 - recall: 0.6791\n", "Epoch 28/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4159 - accuracy: 0.8034 - precision: 0.7577 - recall: 0.6418\n", "Epoch 29/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4145 - accuracy: 0.8034 - precision: 0.7489 - recall: 0.6567\n", "Epoch 30/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4114 - accuracy: 0.8047 - precision: 0.7379 - recall: 0.6828\n", "Epoch 31/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4098 - accuracy: 0.8047 - precision: 0.7565 - recall: 0.6493\n", "Epoch 32/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4105 - accuracy: 0.7956 - precision: 0.7079 - recall: 0.7052\n", "Epoch 33/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4071 - accuracy: 0.8021 - precision: 0.7302 - recall: 0.6866\n", "Epoch 34/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4086 - accuracy: 0.8112 - precision: 0.7470 - recall: 0.6940\n", "Epoch 35/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.4039 - accuracy: 0.8099 - precision: 0.7629 - recall: 0.6604\n", "Epoch 36/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4018 - accuracy: 0.8021 - precision: 0.7248 - recall: 0.6978\n", "Epoch 37/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.4001 - accuracy: 0.8086 - precision: 0.7510 - recall: 0.6754\n", "Epoch 38/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3980 - accuracy: 0.8086 - precision: 0.7300 - recall: 0.7164\n", "Epoch 39/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3977 - accuracy: 0.8112 - precision: 0.7573 - recall: 0.6754\n", 
"Epoch 40/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3949 - accuracy: 0.8138 - precision: 0.7289 - recall: 0.7425\n", "Epoch 41/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3964 - accuracy: 0.8268 - precision: 0.7872 - recall: 0.6903\n", "Epoch 42/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3936 - accuracy: 0.8138 - precision: 0.7551 - recall: 0.6903\n", "Epoch 43/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3911 - accuracy: 0.8112 - precision: 0.7375 - recall: 0.7127\n", "Epoch 44/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3886 - accuracy: 0.8164 - precision: 0.7550 - recall: 0.7015\n", "Epoch 45/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3891 - accuracy: 0.8073 - precision: 0.7273 - recall: 0.7164\n", "Epoch 46/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3859 - accuracy: 0.8216 - precision: 0.7510 - recall: 0.7313\n", "Epoch 47/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3851 - accuracy: 0.8125 - precision: 0.7313 - recall: 0.7313\n", "Epoch 48/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3831 - accuracy: 0.8229 - precision: 0.7619 - recall: 0.7164\n", "Epoch 49/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3805 - accuracy: 0.8255 - precision: 0.7702 - recall: 0.7127\n", "Epoch 50/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3801 - accuracy: 0.8268 - precision: 0.7626 - recall: 0.7313\n", "Epoch 51/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3812 - accuracy: 0.8320 - precision: 0.7663 - recall: 0.7463\n", "Epoch 52/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3820 - accuracy: 0.8203 - precision: 0.7539 - recall: 0.7201\n", "Epoch 53/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3758 - 
accuracy: 0.8333 - precision: 0.7966 - recall: 0.7015\n", "Epoch 54/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3743 - accuracy: 0.8242 - precision: 0.7418 - recall: 0.7612\n", "Epoch 55/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3760 - accuracy: 0.8346 - precision: 0.7809 - recall: 0.7313\n", "Epoch 56/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3727 - accuracy: 0.8307 - precision: 0.7717 - recall: 0.7313\n", "Epoch 57/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3677 - accuracy: 0.8359 - precision: 0.7710 - recall: 0.7537\n", "Epoch 58/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3646 - accuracy: 0.8398 - precision: 0.7821 - recall: 0.7500\n", "Epoch 59/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3651 - accuracy: 0.8424 - precision: 0.7928 - recall: 0.7425\n", "Epoch 60/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3664 - accuracy: 0.8372 - precision: 0.7849 - recall: 0.7351\n", "Epoch 61/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3624 - accuracy: 0.8359 - precision: 0.7817 - recall: 0.7351\n", "Epoch 62/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3601 - accuracy: 0.8398 - precision: 0.7843 - recall: 0.7463\n", "Epoch 63/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3562 - accuracy: 0.8503 - precision: 0.7909 - recall: 0.7761\n", "Epoch 64/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3593 - accuracy: 0.8411 - precision: 0.7704 - recall: 0.7761\n", "Epoch 65/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3609 - accuracy: 0.8477 - precision: 0.7984 - recall: 0.7537\n", "Epoch 66/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3528 - accuracy: 0.8477 - precision: 0.7984 - recall: 0.7537\n", "Epoch 67/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.3528 - accuracy: 0.8477 - precision: 0.7893 - recall: 0.7687\n", "Epoch 68/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3492 - accuracy: 0.8516 - precision: 0.8031 - recall: 0.7612\n", "Epoch 69/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3496 - accuracy: 0.8490 - precision: 0.7923 - recall: 0.7687\n", "Epoch 70/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3455 - accuracy: 0.8490 - precision: 0.7794 - recall: 0.7910\n", "Epoch 71/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3447 - accuracy: 0.8503 - precision: 0.7954 - recall: 0.7687\n", "Epoch 72/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3451 - accuracy: 0.8424 - precision: 0.7692 - recall: 0.7836\n", "Epoch 73/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3451 - accuracy: 0.8529 - precision: 0.7881 - recall: 0.7910\n", "Epoch 74/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3426 - accuracy: 0.8516 - precision: 0.7895 - recall: 0.7836\n", "Epoch 75/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3436 - accuracy: 0.8516 - precision: 0.7939 - recall: 0.7761\n", "Epoch 76/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3354 - accuracy: 0.8581 - precision: 0.7955 - recall: 0.7985\n", "Epoch 77/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3349 - accuracy: 0.8568 - precision: 0.8038 - recall: 0.7799\n", "Epoch 78/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3360 - accuracy: 0.8620 - precision: 0.8115 - recall: 0.7873\n", "Epoch 79/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3320 - accuracy: 0.8607 - precision: 0.8132 - recall: 0.7799\n", "Epoch 80/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3269 - accuracy: 0.8607 - 
precision: 0.8084 - recall: 0.7873\n", "Epoch 81/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3265 - accuracy: 0.8633 - precision: 0.8030 - recall: 0.8060\n", "Epoch 82/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3275 - accuracy: 0.8620 - precision: 0.8240 - recall: 0.7687\n", "Epoch 83/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3309 - accuracy: 0.8581 - precision: 0.7849 - recall: 0.8172\n", "Epoch 84/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3252 - accuracy: 0.8672 - precision: 0.8217 - recall: 0.7910\n", "Epoch 85/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3214 - accuracy: 0.8685 - precision: 0.8081 - recall: 0.8172\n", "Epoch 86/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3233 - accuracy: 0.8672 - precision: 0.8144 - recall: 0.8022\n", "Epoch 87/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3183 - accuracy: 0.8646 - precision: 0.8083 - recall: 0.8022\n", "Epoch 88/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3185 - accuracy: 0.8724 - precision: 0.8195 - recall: 0.8134\n", "Epoch 89/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3145 - accuracy: 0.8750 - precision: 0.8282 - recall: 0.8097\n", "Epoch 90/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3123 - accuracy: 0.8750 - precision: 0.8185 - recall: 0.8246\n", "Epoch 91/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3103 - accuracy: 0.8672 - precision: 0.8097 - recall: 0.8097\n", "Epoch 92/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.3129 - accuracy: 0.8724 - precision: 0.7993 - recall: 0.8470\n", "Epoch 93/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3076 - accuracy: 0.8750 - precision: 0.8162 - recall: 0.8284\n", "Epoch 94/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.3041 - accuracy: 0.8750 - precision: 0.8162 - recall: 0.8284\n", "Epoch 95/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3071 - accuracy: 0.8724 - precision: 0.8172 - recall: 0.8172\n", "Epoch 96/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3081 - accuracy: 0.8737 - precision: 0.8109 - recall: 0.8321\n", "Epoch 97/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3163 - accuracy: 0.8659 - precision: 0.7978 - recall: 0.8246\n", "Epoch 98/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3070 - accuracy: 0.8776 - precision: 0.8246 - recall: 0.8246\n", "Epoch 99/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2990 - accuracy: 0.8828 - precision: 0.8346 - recall: 0.8284\n", "Epoch 100/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2978 - accuracy: 0.8802 - precision: 0.8143 - recall: 0.8507\n", "Epoch 101/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2965 - accuracy: 0.8776 - precision: 0.8199 - recall: 0.8321\n", "Epoch 102/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2960 - accuracy: 0.8815 - precision: 0.8340 - recall: 0.8246\n", "Epoch 103/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3233 - accuracy: 0.8568 - precision: 0.7862 - recall: 0.8097\n", "Epoch 104/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.3091 - accuracy: 0.8672 - precision: 0.7804 - recall: 0.8619\n", "Epoch 105/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2882 - accuracy: 0.8880 - precision: 0.8346 - recall: 0.8470\n", "Epoch 106/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2895 - accuracy: 0.8828 - precision: 0.8225 - recall: 0.8470\n", "Epoch 107/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2870 - accuracy: 0.8893 
- precision: 0.8427 - recall: 0.8396\n", "Epoch 108/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2960 - accuracy: 0.8802 - precision: 0.8121 - recall: 0.8545\n", "Epoch 109/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2912 - accuracy: 0.8841 - precision: 0.8255 - recall: 0.8470\n", "Epoch 110/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2895 - accuracy: 0.8867 - precision: 0.8175 - recall: 0.8694\n", "Epoch 111/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2848 - accuracy: 0.8854 - precision: 0.8237 - recall: 0.8545\n", "Epoch 112/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2847 - accuracy: 0.8841 - precision: 0.8303 - recall: 0.8396\n", "Epoch 113/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2770 - accuracy: 0.8945 - precision: 0.8502 - recall: 0.8470\n", "Epoch 114/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2750 - accuracy: 0.8932 - precision: 0.8419 - recall: 0.8545\n", "Epoch 115/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2760 - accuracy: 0.8906 - precision: 0.8309 - recall: 0.8619\n", "Epoch 116/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2742 - accuracy: 0.8867 - precision: 0.8315 - recall: 0.8470\n", "Epoch 117/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2751 - accuracy: 0.9010 - precision: 0.8357 - recall: 0.8918\n", "Epoch 118/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2720 - accuracy: 0.8945 - precision: 0.8327 - recall: 0.8731\n", "Epoch 119/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2676 - accuracy: 0.8971 - precision: 0.8513 - recall: 0.8545\n", "Epoch 120/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2637 - accuracy: 0.8984 - precision: 0.8467 - recall: 0.8657\n", "Epoch 121/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.2626 - accuracy: 0.9036 - precision: 0.8489 - recall: 0.8806\n", "Epoch 122/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2585 - accuracy: 0.9049 - precision: 0.8571 - recall: 0.8731\n", "Epoch 123/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2570 - accuracy: 0.9010 - precision: 0.8556 - recall: 0.8619\n", "Epoch 124/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2612 - accuracy: 0.9036 - precision: 0.8540 - recall: 0.8731\n", "Epoch 125/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2620 - accuracy: 0.9010 - precision: 0.8453 - recall: 0.8769\n", "Epoch 126/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2596 - accuracy: 0.8971 - precision: 0.8462 - recall: 0.8619\n", "Epoch 127/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2558 - accuracy: 0.9076 - precision: 0.8662 - recall: 0.8694\n", "Epoch 128/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2559 - accuracy: 0.9036 - precision: 0.8540 - recall: 0.8731\n", "Epoch 129/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2523 - accuracy: 0.9102 - precision: 0.8592 - recall: 0.8881\n", "Epoch 130/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2583 - accuracy: 0.9023 - precision: 0.8587 - recall: 0.8619\n", "Epoch 131/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2506 - accuracy: 0.9102 - precision: 0.8672 - recall: 0.8769\n", "Epoch 132/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2498 - accuracy: 0.9089 - precision: 0.8667 - recall: 0.8731\n", "Epoch 133/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2465 - accuracy: 0.9180 - precision: 0.8700 - recall: 0.8993\n", "Epoch 134/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2511 - accuracy: 
0.8997 - precision: 0.8498 - recall: 0.8657\n", "Epoch 135/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2468 - accuracy: 0.9102 - precision: 0.8783 - recall: 0.8619\n", "Epoch 136/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2444 - accuracy: 0.9102 - precision: 0.8672 - recall: 0.8769\n", "Epoch 137/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2421 - accuracy: 0.9089 - precision: 0.8694 - recall: 0.8694\n", "Epoch 138/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2349 - accuracy: 0.9128 - precision: 0.8577 - recall: 0.8993\n", "Epoch 139/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2415 - accuracy: 0.9115 - precision: 0.8676 - recall: 0.8806\n", "Epoch 140/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2441 - accuracy: 0.9062 - precision: 0.8577 - recall: 0.8769\n", "Epoch 141/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2445 - accuracy: 0.9062 - precision: 0.8451 - recall: 0.8955\n", "Epoch 142/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2348 - accuracy: 0.9141 - precision: 0.8826 - recall: 0.8694\n", "Epoch 143/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2449 - accuracy: 0.9115 - precision: 0.8546 - recall: 0.8993\n", "Epoch 144/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2357 - accuracy: 0.9128 - precision: 0.8655 - recall: 0.8881\n", "Epoch 145/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2317 - accuracy: 0.9036 - precision: 0.8464 - recall: 0.8843\n", "Epoch 146/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2418 - accuracy: 0.9089 - precision: 0.8750 - recall: 0.8619\n", "Epoch 147/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2592 - accuracy: 0.9076 - precision: 0.8635 - recall: 0.8731\n", "Epoch 148/500\n", "16/16 
[==============================] - 0s 3ms/step - loss: 0.2644 - accuracy: 0.9036 - precision: 0.8514 - recall: 0.8769\n", "Epoch 149/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2371 - accuracy: 0.9102 - precision: 0.8618 - recall: 0.8843\n", "Epoch 150/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2522 - accuracy: 0.9036 - precision: 0.8647 - recall: 0.8582\n", "Epoch 151/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2410 - accuracy: 0.9036 - precision: 0.8489 - recall: 0.8806\n", "Epoch 152/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2396 - accuracy: 0.9115 - precision: 0.8597 - recall: 0.8918\n", "Epoch 153/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2333 - accuracy: 0.9141 - precision: 0.8826 - recall: 0.8694\n", "Epoch 154/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2320 - accuracy: 0.9180 - precision: 0.8810 - recall: 0.8843\n", "Epoch 155/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2341 - accuracy: 0.9128 - precision: 0.8708 - recall: 0.8806\n", "Epoch 156/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2216 - accuracy: 0.9232 - precision: 0.9066 - recall: 0.8694\n", "Epoch 157/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2242 - accuracy: 0.9193 - precision: 0.8732 - recall: 0.8993\n", "Epoch 158/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2135 - accuracy: 0.9206 - precision: 0.8791 - recall: 0.8955\n", "Epoch 159/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2173 - accuracy: 0.9154 - precision: 0.8664 - recall: 0.8955\n", "Epoch 160/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2127 - accuracy: 0.9284 - precision: 0.8901 - recall: 0.9067\n", "Epoch 161/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2149 - accuracy: 
0.9232 - precision: 0.8914 - recall: 0.8881\n", "Epoch 162/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2117 - accuracy: 0.9219 - precision: 0.8939 - recall: 0.8806\n", "Epoch 163/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2071 - accuracy: 0.9297 - precision: 0.8963 - recall: 0.9030\n", "Epoch 164/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2070 - accuracy: 0.9284 - precision: 0.8873 - recall: 0.9104\n", "Epoch 165/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2035 - accuracy: 0.9297 - precision: 0.9023 - recall: 0.8955\n", "Epoch 166/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2043 - accuracy: 0.9271 - precision: 0.8841 - recall: 0.9104\n", "Epoch 167/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2063 - accuracy: 0.9271 - precision: 0.9015 - recall: 0.8881\n", "Epoch 168/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2045 - accuracy: 0.9284 - precision: 0.8845 - recall: 0.9142\n", "Epoch 169/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2040 - accuracy: 0.9349 - precision: 0.8978 - recall: 0.9179\n", "Epoch 170/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2026 - accuracy: 0.9271 - precision: 0.8955 - recall: 0.8955\n", "Epoch 171/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2059 - accuracy: 0.9193 - precision: 0.8732 - recall: 0.8993\n", "Epoch 172/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1939 - accuracy: 0.9349 - precision: 0.9067 - recall: 0.9067\n", "Epoch 173/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1965 - accuracy: 0.9388 - precision: 0.9018 - recall: 0.9254\n", "Epoch 174/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2036 - accuracy: 0.9310 - precision: 0.8967 - recall: 0.9067\n", "Epoch 175/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.1943 - accuracy: 0.9310 - precision: 0.8909 - recall: 0.9142\n", "Epoch 176/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2265 - accuracy: 0.9089 - precision: 0.8640 - recall: 0.8769\n", "Epoch 177/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2218 - accuracy: 0.9219 - precision: 0.8910 - recall: 0.8843\n", "Epoch 178/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2093 - accuracy: 0.9180 - precision: 0.8782 - recall: 0.8881\n", "Epoch 179/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1931 - accuracy: 0.9336 - precision: 0.9033 - recall: 0.9067\n", "Epoch 180/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1947 - accuracy: 0.9401 - precision: 0.9051 - recall: 0.9254\n", "Epoch 181/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1973 - accuracy: 0.9258 - precision: 0.8809 - recall: 0.9104\n", "Epoch 182/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1912 - accuracy: 0.9362 - precision: 0.9195 - recall: 0.8955\n", "Epoch 183/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1897 - accuracy: 0.9414 - precision: 0.9145 - recall: 0.9179\n", "Epoch 184/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1842 - accuracy: 0.9375 - precision: 0.8986 - recall: 0.9254\n", "Epoch 185/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1856 - accuracy: 0.9427 - precision: 0.9148 - recall: 0.9216\n", "Epoch 186/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1802 - accuracy: 0.9427 - precision: 0.9029 - recall: 0.9366\n", "Epoch 187/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1806 - accuracy: 0.9427 - precision: 0.9179 - recall: 0.9179\n", "Epoch 188/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1788 - accuracy: 
0.9414 - precision: 0.9114 - recall: 0.9216\n", "Epoch 189/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1804 - accuracy: 0.9401 - precision: 0.9142 - recall: 0.9142\n", "Epoch 190/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1796 - accuracy: 0.9414 - precision: 0.9084 - recall: 0.9254\n", "Epoch 191/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1754 - accuracy: 0.9440 - precision: 0.9151 - recall: 0.9254\n", "Epoch 192/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1735 - accuracy: 0.9401 - precision: 0.9051 - recall: 0.9254\n", "Epoch 193/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1777 - accuracy: 0.9362 - precision: 0.9011 - recall: 0.9179\n", "Epoch 194/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1805 - accuracy: 0.9440 - precision: 0.9061 - recall: 0.9366\n", "Epoch 195/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1889 - accuracy: 0.9336 - precision: 0.9094 - recall: 0.8993\n", "Epoch 196/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1869 - accuracy: 0.9362 - precision: 0.8982 - recall: 0.9216\n", "Epoch 197/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1813 - accuracy: 0.9362 - precision: 0.9071 - recall: 0.9104\n", "Epoch 198/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1796 - accuracy: 0.9362 - precision: 0.8925 - recall: 0.9291\n", "Epoch 199/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1706 - accuracy: 0.9414 - precision: 0.9055 - recall: 0.9291\n", "Epoch 200/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1675 - accuracy: 0.9466 - precision: 0.9188 - recall: 0.9291\n", "Epoch 201/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1667 - accuracy: 0.9479 - precision: 0.9161 - recall: 0.9366\n", "Epoch 202/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.1730 - accuracy: 0.9375 - precision: 0.9044 - recall: 0.9179\n", "Epoch 203/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1695 - accuracy: 0.9414 - precision: 0.9176 - recall: 0.9142\n", "Epoch 204/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1732 - accuracy: 0.9375 - precision: 0.8929 - recall: 0.9328\n", "Epoch 205/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1676 - accuracy: 0.9453 - precision: 0.9065 - recall: 0.9403\n", "Epoch 206/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1664 - accuracy: 0.9388 - precision: 0.9108 - recall: 0.9142\n", "Epoch 207/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1668 - accuracy: 0.9453 - precision: 0.9124 - recall: 0.9328\n", "Epoch 208/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1621 - accuracy: 0.9531 - precision: 0.9296 - recall: 0.9366\n", "Epoch 209/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1661 - accuracy: 0.9401 - precision: 0.9022 - recall: 0.9291\n", "Epoch 210/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1652 - accuracy: 0.9505 - precision: 0.9356 - recall: 0.9216\n", "Epoch 211/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1624 - accuracy: 0.9427 - precision: 0.9088 - recall: 0.9291\n", "Epoch 212/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1576 - accuracy: 0.9505 - precision: 0.9167 - recall: 0.9440\n", "Epoch 213/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1637 - accuracy: 0.9518 - precision: 0.9392 - recall: 0.9216\n", "Epoch 214/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1603 - accuracy: 0.9479 - precision: 0.9101 - recall: 0.9440\n", "Epoch 215/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1649 - accuracy: 
0.9466 - precision: 0.9251 - recall: 0.9216\n", "Epoch 216/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1596 - accuracy: 0.9466 - precision: 0.9097 - recall: 0.9403\n", "Epoch 217/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1579 - accuracy: 0.9427 - precision: 0.9211 - recall: 0.9142\n", "Epoch 218/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1543 - accuracy: 0.9505 - precision: 0.9228 - recall: 0.9366\n", "Epoch 219/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1727 - accuracy: 0.9323 - precision: 0.9091 - recall: 0.8955\n", "Epoch 220/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1853 - accuracy: 0.9271 - precision: 0.8897 - recall: 0.9030\n", "Epoch 221/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2139 - accuracy: 0.9167 - precision: 0.8592 - recall: 0.9104\n", "Epoch 222/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1807 - accuracy: 0.9362 - precision: 0.8982 - recall: 0.9216\n", "Epoch 223/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1919 - accuracy: 0.9245 - precision: 0.8860 - recall: 0.8993\n", "Epoch 224/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1682 - accuracy: 0.9349 - precision: 0.8978 - recall: 0.9179\n", "Epoch 225/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1698 - accuracy: 0.9414 - precision: 0.9145 - recall: 0.9179\n", "Epoch 226/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1782 - accuracy: 0.9414 - precision: 0.9055 - recall: 0.9291\n", "Epoch 227/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1582 - accuracy: 0.9479 - precision: 0.9286 - recall: 0.9216\n", "Epoch 228/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1507 - accuracy: 0.9531 - precision: 0.9265 - recall: 0.9403\n", "Epoch 229/500\n", "16/16 
[==============================] - 0s 4ms/step - loss: 0.1572 - accuracy: 0.9440 - precision: 0.9245 - recall: 0.9142\n", "Epoch 230/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1489 - accuracy: 0.9544 - precision: 0.9299 - recall: 0.9403\n", "Epoch 231/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1453 - accuracy: 0.9518 - precision: 0.9326 - recall: 0.9291\n", "Epoch 232/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1437 - accuracy: 0.9583 - precision: 0.9403 - recall: 0.9403\n", "Epoch 233/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1465 - accuracy: 0.9453 - precision: 0.9154 - recall: 0.9291\n", "Epoch 234/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1462 - accuracy: 0.9479 - precision: 0.9222 - recall: 0.9291\n", "Epoch 235/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1556 - accuracy: 0.9531 - precision: 0.9462 - recall: 0.9179\n", "Epoch 236/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1473 - accuracy: 0.9531 - precision: 0.9265 - recall: 0.9403\n", "Epoch 237/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1401 - accuracy: 0.9557 - precision: 0.9398 - recall: 0.9328\n", "Epoch 238/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1383 - accuracy: 0.9570 - precision: 0.9336 - recall: 0.9440\n", "Epoch 239/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1409 - accuracy: 0.9531 - precision: 0.9296 - recall: 0.9366\n", "Epoch 240/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1607 - accuracy: 0.9466 - precision: 0.9158 - recall: 0.9328\n", "Epoch 241/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1634 - accuracy: 0.9401 - precision: 0.9081 - recall: 0.9216\n", "Epoch 242/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1566 - accuracy: 
0.9479 - precision: 0.9318 - recall: 0.9179\n", "Epoch 243/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1500 - accuracy: 0.9531 - precision: 0.9234 - recall: 0.9440\n", "Epoch 244/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1476 - accuracy: 0.9557 - precision: 0.9466 - recall: 0.9254\n", "Epoch 245/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1669 - accuracy: 0.9427 - precision: 0.9000 - recall: 0.9403\n", "Epoch 246/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1654 - accuracy: 0.9401 - precision: 0.9142 - recall: 0.9142\n", "Epoch 247/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1445 - accuracy: 0.9518 - precision: 0.9326 - recall: 0.9291\n", "Epoch 248/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1509 - accuracy: 0.9466 - precision: 0.9127 - recall: 0.9366\n", "Epoch 249/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1408 - accuracy: 0.9557 - precision: 0.9398 - recall: 0.9328\n", "Epoch 250/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1343 - accuracy: 0.9544 - precision: 0.9331 - recall: 0.9366\n", "Epoch 251/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1370 - accuracy: 0.9544 - precision: 0.9299 - recall: 0.9403\n", "Epoch 252/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1405 - accuracy: 0.9544 - precision: 0.9363 - recall: 0.9328\n", "Epoch 253/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1281 - accuracy: 0.9635 - precision: 0.9545 - recall: 0.9403\n", "Epoch 254/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1337 - accuracy: 0.9531 - precision: 0.9203 - recall: 0.9478\n", "Epoch 255/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1382 - accuracy: 0.9557 - precision: 0.9398 - recall: 0.9328\n", "Epoch 256/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.1375 - accuracy: 0.9505 - precision: 0.9356 - recall: 0.9216\n", "Epoch 257/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1367 - accuracy: 0.9505 - precision: 0.9167 - recall: 0.9440\n", "Epoch 258/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1311 - accuracy: 0.9531 - precision: 0.9394 - recall: 0.9254\n", "Epoch 259/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1336 - accuracy: 0.9570 - precision: 0.9304 - recall: 0.9478\n", "Epoch 260/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1320 - accuracy: 0.9557 - precision: 0.9398 - recall: 0.9328\n", "Epoch 261/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1228 - accuracy: 0.9635 - precision: 0.9511 - recall: 0.9440\n", "Epoch 262/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1230 - accuracy: 0.9583 - precision: 0.9436 - recall: 0.9366\n", "Epoch 263/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1243 - accuracy: 0.9583 - precision: 0.9370 - recall: 0.9440\n", "Epoch 264/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1207 - accuracy: 0.9609 - precision: 0.9440 - recall: 0.9440\n", "Epoch 265/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1272 - accuracy: 0.9570 - precision: 0.9336 - recall: 0.9440\n", "Epoch 266/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1319 - accuracy: 0.9544 - precision: 0.9267 - recall: 0.9440\n", "Epoch 267/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1400 - accuracy: 0.9479 - precision: 0.9318 - recall: 0.9179\n", "Epoch 268/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1245 - accuracy: 0.9635 - precision: 0.9545 - recall: 0.9403\n", "Epoch 269/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1342 - accuracy: 
0.9466 - precision: 0.9188 - recall: 0.9291\n", "Epoch 270/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1400 - accuracy: 0.9466 - precision: 0.9283 - recall: 0.9179\n", "Epoch 271/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1535 - accuracy: 0.9375 - precision: 0.9044 - recall: 0.9179\n", "Epoch 272/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1521 - accuracy: 0.9414 - precision: 0.9208 - recall: 0.9104\n", "Epoch 273/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1513 - accuracy: 0.9492 - precision: 0.9164 - recall: 0.9403\n", "Epoch 274/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1595 - accuracy: 0.9336 - precision: 0.9064 - recall: 0.9030\n", "Epoch 275/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1324 - accuracy: 0.9505 - precision: 0.9137 - recall: 0.9478\n", "Epoch 276/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1181 - accuracy: 0.9622 - precision: 0.9509 - recall: 0.9403\n", "Epoch 277/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1162 - accuracy: 0.9622 - precision: 0.9476 - recall: 0.9440\n", "Epoch 278/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1188 - accuracy: 0.9609 - precision: 0.9440 - recall: 0.9440\n", "Epoch 279/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1264 - accuracy: 0.9544 - precision: 0.9236 - recall: 0.9478\n", "Epoch 280/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1278 - accuracy: 0.9544 - precision: 0.9498 - recall: 0.9179\n", "Epoch 281/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1294 - accuracy: 0.9518 - precision: 0.9425 - recall: 0.9179\n", "Epoch 282/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1279 - accuracy: 0.9570 - precision: 0.9401 - recall: 0.9366\n", "Epoch 283/500\n", "16/16 
[==============================] - 0s 3ms/step - loss: 0.1477 - accuracy: 0.9453 - precision: 0.9185 - recall: 0.9254\n", "Epoch 284/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1629 - accuracy: 0.9349 - precision: 0.9129 - recall: 0.8993\n", "Epoch 285/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1456 - accuracy: 0.9401 - precision: 0.9142 - recall: 0.9142\n", "Epoch 286/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1474 - accuracy: 0.9414 - precision: 0.9114 - recall: 0.9216\n", "Epoch 287/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1269 - accuracy: 0.9531 - precision: 0.9265 - recall: 0.9403\n", "Epoch 288/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1528 - accuracy: 0.9453 - precision: 0.9154 - recall: 0.9291\n", "Epoch 289/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1364 - accuracy: 0.9531 - precision: 0.9462 - recall: 0.9179\n", "Epoch 290/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1287 - accuracy: 0.9518 - precision: 0.9358 - recall: 0.9254\n", "Epoch 291/500\n", "16/16 [==============================] - 0s 4ms/step - loss: 0.1345 - accuracy: 0.9531 - precision: 0.9361 - recall: 0.9291\n", "Epoch 292/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1322 - accuracy: 0.9518 - precision: 0.9231 - recall: 0.9403\n", "Epoch 293/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1192 - accuracy: 0.9570 - precision: 0.9502 - recall: 0.9254\n", "Epoch 294/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1096 - accuracy: 0.9648 - precision: 0.9480 - recall: 0.9515\n", "Epoch 295/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1055 - accuracy: 0.9661 - precision: 0.9549 - recall: 0.9478\n", "Epoch 296/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1025 - accuracy: 
0.9688 - precision: 0.9621 - recall: 0.9478\n", "Epoch 297/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1027 - accuracy: 0.9688 - precision: 0.9656 - recall: 0.9440\n", "Epoch 298/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1107 - accuracy: 0.9622 - precision: 0.9544 - recall: 0.9366\n", "Epoch 299/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1178 - accuracy: 0.9583 - precision: 0.9338 - recall: 0.9478\n", "Epoch 300/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1259 - accuracy: 0.9583 - precision: 0.9370 - recall: 0.9440\n", "Epoch 301/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1316 - accuracy: 0.9518 - precision: 0.9459 - recall: 0.9142\n", "Epoch 302/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1127 - accuracy: 0.9570 - precision: 0.9304 - recall: 0.9478\n", "Epoch 303/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1067 - accuracy: 0.9622 - precision: 0.9476 - recall: 0.9440\n", "Epoch 304/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1092 - accuracy: 0.9635 - precision: 0.9511 - recall: 0.9440\n", "Epoch 305/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0961 - accuracy: 0.9688 - precision: 0.9656 - recall: 0.9440\n", "Epoch 306/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0970 - accuracy: 0.9688 - precision: 0.9586 - recall: 0.9515\n", "Epoch 307/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0982 - accuracy: 0.9701 - precision: 0.9658 - recall: 0.9478\n", "Epoch 308/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0987 - accuracy: 0.9701 - precision: 0.9658 - recall: 0.9478\n", "Epoch 309/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0976 - accuracy: 0.9688 - precision: 0.9552 - recall: 0.9552\n", "Epoch 310/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.0942 - accuracy: 0.9701 - precision: 0.9623 - recall: 0.9515\n", "Epoch 311/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0930 - accuracy: 0.9674 - precision: 0.9620 - recall: 0.9440\n", "Epoch 312/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0927 - accuracy: 0.9688 - precision: 0.9621 - recall: 0.9478\n", "Epoch 313/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0995 - accuracy: 0.9648 - precision: 0.9480 - recall: 0.9515\n", "Epoch 314/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1018 - accuracy: 0.9661 - precision: 0.9583 - recall: 0.9440\n", "Epoch 315/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1111 - accuracy: 0.9557 - precision: 0.9366 - recall: 0.9366\n", "Epoch 316/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1038 - accuracy: 0.9661 - precision: 0.9549 - recall: 0.9478\n", "Epoch 317/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1009 - accuracy: 0.9622 - precision: 0.9509 - recall: 0.9403\n", "Epoch 318/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0924 - accuracy: 0.9661 - precision: 0.9549 - recall: 0.9478\n", "Epoch 319/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0963 - accuracy: 0.9727 - precision: 0.9696 - recall: 0.9515\n", "Epoch 320/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0931 - accuracy: 0.9661 - precision: 0.9549 - recall: 0.9478\n", "Epoch 321/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0908 - accuracy: 0.9688 - precision: 0.9586 - recall: 0.9515\n", "Epoch 322/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0939 - accuracy: 0.9661 - precision: 0.9515 - recall: 0.9515\n", "Epoch 323/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0885 - accuracy: 
0.9688 - precision: 0.9586 - recall: 0.9515\n", "Epoch 324/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0908 - accuracy: 0.9727 - precision: 0.9660 - recall: 0.9552\n", "Epoch 325/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0924 - accuracy: 0.9648 - precision: 0.9582 - recall: 0.9403\n", "Epoch 326/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0915 - accuracy: 0.9701 - precision: 0.9658 - recall: 0.9478\n", "Epoch 327/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0934 - accuracy: 0.9648 - precision: 0.9480 - recall: 0.9515\n", "Epoch 328/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0887 - accuracy: 0.9714 - precision: 0.9659 - recall: 0.9515\n", "Epoch 329/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0924 - accuracy: 0.9648 - precision: 0.9513 - recall: 0.9478\n", "Epoch 330/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0867 - accuracy: 0.9714 - precision: 0.9624 - recall: 0.9552\n", "Epoch 331/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0853 - accuracy: 0.9688 - precision: 0.9552 - recall: 0.9552\n", "Epoch 332/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0819 - accuracy: 0.9753 - precision: 0.9770 - recall: 0.9515\n", "Epoch 333/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0833 - accuracy: 0.9701 - precision: 0.9588 - recall: 0.9552\n", "Epoch 334/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0843 - accuracy: 0.9714 - precision: 0.9659 - recall: 0.9515\n", "Epoch 335/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0865 - accuracy: 0.9674 - precision: 0.9483 - recall: 0.9590\n", "Epoch 336/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0867 - accuracy: 0.9714 - precision: 0.9731 - recall: 0.9440\n", "Epoch 337/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.0877 - accuracy: 0.9661 - precision: 0.9515 - recall: 0.9515\n", "Epoch 338/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0821 - accuracy: 0.9727 - precision: 0.9625 - recall: 0.9590\n", "Epoch 339/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0840 - accuracy: 0.9701 - precision: 0.9588 - recall: 0.9552\n", "Epoch 340/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1111 - accuracy: 0.9544 - precision: 0.9299 - recall: 0.9403\n", "Epoch 341/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1033 - accuracy: 0.9609 - precision: 0.9440 - recall: 0.9440\n", "Epoch 342/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0995 - accuracy: 0.9635 - precision: 0.9444 - recall: 0.9515\n", "Epoch 343/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0931 - accuracy: 0.9622 - precision: 0.9509 - recall: 0.9403\n", "Epoch 344/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1027 - accuracy: 0.9544 - precision: 0.9331 - recall: 0.9366\n", "Epoch 345/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0824 - accuracy: 0.9701 - precision: 0.9623 - recall: 0.9515\n", "Epoch 346/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0908 - accuracy: 0.9661 - precision: 0.9449 - recall: 0.9590\n", "Epoch 347/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0859 - accuracy: 0.9661 - precision: 0.9549 - recall: 0.9478\n", "Epoch 348/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0829 - accuracy: 0.9674 - precision: 0.9585 - recall: 0.9478\n", "Epoch 349/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1041 - accuracy: 0.9596 - precision: 0.9405 - recall: 0.9440\n", "Epoch 350/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1059 - accuracy: 
0.9635 - precision: 0.9545 - recall: 0.9403\n", "Epoch 351/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0999 - accuracy: 0.9622 - precision: 0.9544 - recall: 0.9366\n", "Epoch 352/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0988 - accuracy: 0.9622 - precision: 0.9509 - recall: 0.9403\n", "Epoch 353/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1156 - accuracy: 0.9570 - precision: 0.9368 - recall: 0.9403\n", "Epoch 354/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1108 - accuracy: 0.9596 - precision: 0.9472 - recall: 0.9366\n", "Epoch 355/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1376 - accuracy: 0.9466 - precision: 0.9349 - recall: 0.9104\n", "Epoch 356/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1319 - accuracy: 0.9466 - precision: 0.9158 - recall: 0.9328\n", "Epoch 357/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1718 - accuracy: 0.9336 - precision: 0.8974 - recall: 0.9142\n", "Epoch 358/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1281 - accuracy: 0.9466 - precision: 0.9127 - recall: 0.9366\n", "Epoch 359/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1151 - accuracy: 0.9518 - precision: 0.9392 - recall: 0.9216\n", "Epoch 360/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0985 - accuracy: 0.9622 - precision: 0.9509 - recall: 0.9403\n", "Epoch 361/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0862 - accuracy: 0.9701 - precision: 0.9623 - recall: 0.9515\n", "Epoch 362/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0803 - accuracy: 0.9714 - precision: 0.9624 - recall: 0.9552\n", "Epoch 363/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0783 - accuracy: 0.9701 - precision: 0.9658 - recall: 0.9478\n", "Epoch 364/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.0749 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 365/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0742 - accuracy: 0.9740 - precision: 0.9733 - recall: 0.9515\n", "Epoch 366/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0780 - accuracy: 0.9727 - precision: 0.9696 - recall: 0.9515\n", "Epoch 367/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0784 - accuracy: 0.9727 - precision: 0.9625 - recall: 0.9590\n", "Epoch 368/500\n", "16/16 [==============================] - 0s 4ms/step - loss: 0.0755 - accuracy: 0.9753 - precision: 0.9628 - recall: 0.9664\n", "Epoch 369/500\n", "16/16 [==============================] - 0s 4ms/step - loss: 0.0809 - accuracy: 0.9740 - precision: 0.9662 - recall: 0.9590\n", "Epoch 370/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0697 - accuracy: 0.9740 - precision: 0.9697 - recall: 0.9552\n", "Epoch 371/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0763 - accuracy: 0.9740 - precision: 0.9662 - recall: 0.9590\n", "Epoch 372/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0833 - accuracy: 0.9688 - precision: 0.9485 - recall: 0.9627\n", "Epoch 373/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0789 - accuracy: 0.9688 - precision: 0.9519 - recall: 0.9590\n", "Epoch 374/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0739 - accuracy: 0.9688 - precision: 0.9552 - recall: 0.9552\n", "Epoch 375/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0709 - accuracy: 0.9740 - precision: 0.9697 - recall: 0.9552\n", "Epoch 376/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0695 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 377/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0710 - accuracy: 
0.9727 - precision: 0.9625 - recall: 0.9590\n", "Epoch 378/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0826 - accuracy: 0.9635 - precision: 0.9545 - recall: 0.9403\n", "Epoch 379/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0778 - accuracy: 0.9701 - precision: 0.9554 - recall: 0.9590\n", "Epoch 380/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0778 - accuracy: 0.9674 - precision: 0.9585 - recall: 0.9478\n", "Epoch 381/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0713 - accuracy: 0.9740 - precision: 0.9662 - recall: 0.9590\n", "Epoch 382/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0849 - accuracy: 0.9622 - precision: 0.9509 - recall: 0.9403\n", "Epoch 383/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0765 - accuracy: 0.9701 - precision: 0.9455 - recall: 0.9701\n", "Epoch 384/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0733 - accuracy: 0.9727 - precision: 0.9591 - recall: 0.9627\n", "Epoch 385/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0721 - accuracy: 0.9753 - precision: 0.9698 - recall: 0.9590\n", "Epoch 386/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0710 - accuracy: 0.9714 - precision: 0.9624 - recall: 0.9552\n", "Epoch 387/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0634 - accuracy: 0.9766 - precision: 0.9771 - recall: 0.9552\n", "Epoch 388/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0653 - accuracy: 0.9779 - precision: 0.9700 - recall: 0.9664\n", "Epoch 389/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0664 - accuracy: 0.9740 - precision: 0.9559 - recall: 0.9701\n", "Epoch 390/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0631 - accuracy: 0.9779 - precision: 0.9700 - recall: 0.9664\n", "Epoch 391/500\n", "16/16 
[==============================] - 0s 3ms/step - loss: 0.0646 - accuracy: 0.9740 - precision: 0.9662 - recall: 0.9590\n", "Epoch 392/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0578 - accuracy: 0.9792 - precision: 0.9701 - recall: 0.9701\n", "Epoch 393/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0710 - accuracy: 0.9688 - precision: 0.9552 - recall: 0.9552\n", "Epoch 394/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0850 - accuracy: 0.9648 - precision: 0.9414 - recall: 0.9590\n", "Epoch 395/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0926 - accuracy: 0.9635 - precision: 0.9580 - recall: 0.9366\n", "Epoch 396/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0732 - accuracy: 0.9688 - precision: 0.9586 - recall: 0.9515\n", "Epoch 397/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0643 - accuracy: 0.9766 - precision: 0.9630 - recall: 0.9701\n", "Epoch 398/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0640 - accuracy: 0.9753 - precision: 0.9663 - recall: 0.9627\n", "Epoch 399/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0699 - accuracy: 0.9753 - precision: 0.9594 - recall: 0.9701\n", "Epoch 400/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0594 - accuracy: 0.9779 - precision: 0.9772 - recall: 0.9590\n", "Epoch 401/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0562 - accuracy: 0.9779 - precision: 0.9772 - recall: 0.9590\n", "Epoch 402/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0570 - accuracy: 0.9779 - precision: 0.9772 - recall: 0.9590\n", "Epoch 403/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0561 - accuracy: 0.9831 - precision: 0.9775 - recall: 0.9739\n", "Epoch 404/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0546 - accuracy: 
0.9779 - precision: 0.9736 - recall: 0.9627\n", "Epoch 405/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0528 - accuracy: 0.9831 - precision: 0.9775 - recall: 0.9739\n", "Epoch 406/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0526 - accuracy: 0.9805 - precision: 0.9738 - recall: 0.9701\n", "Epoch 407/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0533 - accuracy: 0.9831 - precision: 0.9775 - recall: 0.9739\n", "Epoch 408/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0528 - accuracy: 0.9792 - precision: 0.9701 - recall: 0.9701\n", "Epoch 409/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0535 - accuracy: 0.9805 - precision: 0.9738 - recall: 0.9701\n", "Epoch 410/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0553 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 411/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0544 - accuracy: 0.9805 - precision: 0.9774 - recall: 0.9664\n", "Epoch 412/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0627 - accuracy: 0.9792 - precision: 0.9701 - recall: 0.9701\n", "Epoch 413/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0765 - accuracy: 0.9688 - precision: 0.9656 - recall: 0.9440\n", "Epoch 414/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1028 - accuracy: 0.9596 - precision: 0.9405 - recall: 0.9440\n", "Epoch 415/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1510 - accuracy: 0.9349 - precision: 0.9129 - recall: 0.8993\n", "Epoch 416/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.2562 - accuracy: 0.9206 - precision: 0.8683 - recall: 0.9104\n", "Epoch 417/500\n", "16/16 [==============================] - 0s 4ms/step - loss: 0.1947 - accuracy: 0.9388 - precision: 0.9234 - recall: 0.8993\n", "Epoch 418/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.2288 - accuracy: 0.9271 - precision: 0.8681 - recall: 0.9328\n", "Epoch 419/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.2184 - accuracy: 0.9349 - precision: 0.9129 - recall: 0.8993\n", "Epoch 420/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.1682 - accuracy: 0.9440 - precision: 0.9182 - recall: 0.9216\n", "Epoch 421/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0964 - accuracy: 0.9648 - precision: 0.9414 - recall: 0.9590\n", "Epoch 422/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0901 - accuracy: 0.9648 - precision: 0.9617 - recall: 0.9366\n", "Epoch 423/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0802 - accuracy: 0.9714 - precision: 0.9522 - recall: 0.9664\n", "Epoch 424/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0878 - accuracy: 0.9609 - precision: 0.9440 - recall: 0.9440\n", "Epoch 425/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0719 - accuracy: 0.9701 - precision: 0.9588 - recall: 0.9552\n", "Epoch 426/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0591 - accuracy: 0.9805 - precision: 0.9738 - recall: 0.9701\n", "Epoch 427/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0618 - accuracy: 0.9779 - precision: 0.9700 - recall: 0.9664\n", "Epoch 428/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0662 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 429/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0821 - accuracy: 0.9674 - precision: 0.9585 - recall: 0.9478\n", "Epoch 430/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0638 - accuracy: 0.9792 - precision: 0.9667 - recall: 0.9739\n", "Epoch 431/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0550 - accuracy: 
0.9805 - precision: 0.9774 - recall: 0.9664\n", "Epoch 432/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0567 - accuracy: 0.9831 - precision: 0.9705 - recall: 0.9813\n", "Epoch 433/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0606 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 434/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0510 - accuracy: 0.9818 - precision: 0.9774 - recall: 0.9701\n", "Epoch 435/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0530 - accuracy: 0.9805 - precision: 0.9703 - recall: 0.9739\n", "Epoch 436/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0518 - accuracy: 0.9844 - precision: 0.9741 - recall: 0.9813\n", "Epoch 437/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0680 - accuracy: 0.9727 - precision: 0.9591 - recall: 0.9627\n", "Epoch 438/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0621 - accuracy: 0.9792 - precision: 0.9701 - recall: 0.9701\n", "Epoch 439/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0559 - accuracy: 0.9766 - precision: 0.9735 - recall: 0.9590\n", "Epoch 440/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0587 - accuracy: 0.9766 - precision: 0.9630 - recall: 0.9701\n", "Epoch 441/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0540 - accuracy: 0.9766 - precision: 0.9699 - recall: 0.9627\n", "Epoch 442/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0489 - accuracy: 0.9818 - precision: 0.9739 - recall: 0.9739\n", "Epoch 443/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0466 - accuracy: 0.9857 - precision: 0.9777 - recall: 0.9813\n", "Epoch 444/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0473 - accuracy: 0.9857 - precision: 0.9777 - recall: 0.9813\n", "Epoch 445/500\n", "16/16 
[==============================] - 0s 3ms/step - loss: 0.0472 - accuracy: 0.9844 - precision: 0.9776 - recall: 0.9776\n", "Epoch 446/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0502 - accuracy: 0.9831 - precision: 0.9775 - recall: 0.9739\n", "Epoch 447/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0585 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 448/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0662 - accuracy: 0.9753 - precision: 0.9628 - recall: 0.9664\n", "Epoch 449/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0839 - accuracy: 0.9661 - precision: 0.9481 - recall: 0.9552\n", "Epoch 450/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.1023 - accuracy: 0.9674 - precision: 0.9620 - recall: 0.9440\n", "Epoch 451/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0908 - accuracy: 0.9661 - precision: 0.9449 - recall: 0.9590\n", "Epoch 452/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0819 - accuracy: 0.9674 - precision: 0.9655 - recall: 0.9403\n", "Epoch 453/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0575 - accuracy: 0.9805 - precision: 0.9668 - recall: 0.9776\n", "Epoch 454/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0659 - accuracy: 0.9766 - precision: 0.9664 - recall: 0.9664\n", "Epoch 455/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0602 - accuracy: 0.9766 - precision: 0.9630 - recall: 0.9701\n", "Epoch 456/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0671 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 457/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0696 - accuracy: 0.9740 - precision: 0.9662 - recall: 0.9590\n", "Epoch 458/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0752 - accuracy: 
0.9674 - precision: 0.9483 - recall: 0.9590\n", "Epoch 459/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0739 - accuracy: 0.9688 - precision: 0.9621 - recall: 0.9478\n", "Epoch 460/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0607 - accuracy: 0.9753 - precision: 0.9663 - recall: 0.9627\n", "Epoch 461/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0546 - accuracy: 0.9818 - precision: 0.9739 - recall: 0.9739\n", "Epoch 462/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0566 - accuracy: 0.9805 - precision: 0.9774 - recall: 0.9664\n", "Epoch 463/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0562 - accuracy: 0.9805 - precision: 0.9634 - recall: 0.9813\n", "Epoch 464/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0598 - accuracy: 0.9753 - precision: 0.9594 - recall: 0.9701\n", "Epoch 465/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0666 - accuracy: 0.9714 - precision: 0.9522 - recall: 0.9664\n", "Epoch 466/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0527 - accuracy: 0.9805 - precision: 0.9703 - recall: 0.9739\n", "Epoch 467/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0495 - accuracy: 0.9818 - precision: 0.9704 - recall: 0.9776\n", "Epoch 468/500\n", "16/16 [==============================] - 0s 4ms/step - loss: 0.0439 - accuracy: 0.9857 - precision: 0.9777 - recall: 0.9813\n", "Epoch 469/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0486 - accuracy: 0.9792 - precision: 0.9632 - recall: 0.9776\n", "Epoch 470/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0490 - accuracy: 0.9818 - precision: 0.9669 - recall: 0.9813\n", "Epoch 471/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0532 - accuracy: 0.9792 - precision: 0.9632 - recall: 0.9776\n", "Epoch 472/500\n", "16/16 
[==============================] - 0s 2ms/step - loss: 0.0530 - accuracy: 0.9792 - precision: 0.9667 - recall: 0.9739\n", "Epoch 473/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0459 - accuracy: 0.9818 - precision: 0.9739 - recall: 0.9739\n", "Epoch 474/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0455 - accuracy: 0.9831 - precision: 0.9705 - recall: 0.9813\n", "Epoch 475/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0455 - accuracy: 0.9831 - precision: 0.9775 - recall: 0.9739\n", "Epoch 476/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0527 - accuracy: 0.9753 - precision: 0.9594 - recall: 0.9701\n", "Epoch 477/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0480 - accuracy: 0.9818 - precision: 0.9704 - recall: 0.9776\n", "Epoch 478/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0486 - accuracy: 0.9831 - precision: 0.9740 - recall: 0.9776\n", "Epoch 479/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0419 - accuracy: 0.9844 - precision: 0.9741 - recall: 0.9813\n", "Epoch 480/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0465 - accuracy: 0.9792 - precision: 0.9667 - recall: 0.9739\n", "Epoch 481/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0478 - accuracy: 0.9792 - precision: 0.9599 - recall: 0.9813\n", "Epoch 482/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0471 - accuracy: 0.9805 - precision: 0.9668 - recall: 0.9776\n", "Epoch 483/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0497 - accuracy: 0.9753 - precision: 0.9628 - recall: 0.9664\n", "Epoch 484/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0457 - accuracy: 0.9818 - precision: 0.9739 - recall: 0.9739\n", "Epoch 485/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0411 - accuracy: 
0.9857 - precision: 0.9777 - recall: 0.9813\n", "Epoch 486/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0451 - accuracy: 0.9818 - precision: 0.9739 - recall: 0.9739\n", "Epoch 487/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0580 - accuracy: 0.9779 - precision: 0.9665 - recall: 0.9701\n", "Epoch 488/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0529 - accuracy: 0.9792 - precision: 0.9667 - recall: 0.9739\n", "Epoch 489/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0443 - accuracy: 0.9805 - precision: 0.9668 - recall: 0.9776\n", "Epoch 490/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0508 - accuracy: 0.9766 - precision: 0.9630 - recall: 0.9701\n", "Epoch 491/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0438 - accuracy: 0.9844 - precision: 0.9741 - recall: 0.9813\n", "Epoch 492/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0405 - accuracy: 0.9844 - precision: 0.9741 - recall: 0.9813\n", "Epoch 493/500\n", "16/16 [==============================] - 0s 2ms/step - loss: 0.0412 - accuracy: 0.9805 - precision: 0.9738 - recall: 0.9701\n", "Epoch 494/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0375 - accuracy: 0.9857 - precision: 0.9813 - recall: 0.9776\n", "Epoch 495/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0384 - accuracy: 0.9870 - precision: 0.9778 - recall: 0.9851\n", "Epoch 496/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0369 - accuracy: 0.9909 - precision: 0.9888 - recall: 0.9851\n", "Epoch 497/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0353 - accuracy: 0.9883 - precision: 0.9814 - recall: 0.9851\n", "Epoch 498/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0340 - accuracy: 0.9883 - precision: 0.9814 - recall: 0.9851\n", "Epoch 499/500\n", "16/16 
[==============================] - 0s 3ms/step - loss: 0.0346 - accuracy: 0.9870 - precision: 0.9850 - recall: 0.9776\n", "Epoch 500/500\n", "16/16 [==============================] - 0s 3ms/step - loss: 0.0365 - accuracy: 0.9870 - precision: 0.9886 - recall: 0.9739\n", "20/20 [==============================] - 0s 2ms/step - loss: 0.0434 - accuracy: 0.9805 - precision: 0.9607 - recall: 0.9865\n", "Train Accuracy: 98.05\n", "Train Precision: 96.07\n", "Train Recall: 98.65\n", "5/5 [==============================] - 0s 4ms/step - loss: 0.0446 - accuracy: 0.9870 - precision: 0.9778 - recall: 0.9778\n", "Test Accuracy: 98.70\n", "Test Precision: 98.70\n", "Test Recall: 98.70\n" ], "name": "stdout" } ] }, { "cell_type": "code", "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 129 }, "id": "7fjCG7tY9DfQ", "outputId": "b8f55734-8709-4814-c8af-50c4ff8019f7" }, "source": [ "F11 = 2 * (0.9870 * 0.9870) / (0.9870 + 0.9870)\n", "print('Test F score',F11)" ], "execution_count": null, "outputs": [] }, { "cell_type": "code", "metadata": { "id": "GBFaN7C7g6hy" }, "source": [ "" ], "execution_count": null, "outputs": [] } ] }