{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "CompareAE.ipynb",
"provenance": [],
"collapsed_sections": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
}
},
"cells": [
{
"cell_type": "code",
"metadata": {
"id": "D9HZTja19muZ"
},
"source": [
"import warnings\n",
"warnings.filterwarnings(\"ignore\")\n",
"import tensorflow as tf\n",
"import itertools\n",
"from sklearn.preprocessing import MinMaxScaler\n",
"from sklearn.metrics import confusion_matrix,accuracy_score,recall_score,precision_score,f1_score\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import pandas as pd\n",
"from keras.layers import Input,Dropout,Dense\n",
"from keras.models import Model\n",
"from keras import regularizers\n",
"from keras.utils.data_utils import get_file\n",
"%matplotlib inline"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "rx2kw3Kb_oaS",
"outputId": "bd038d05-9785-4bed-ec2a-f35eb4c44f4e",
"colab": {
"resources": {
"http://localhost:8080/nbextensions/google.colab/files.js": {
"data": "Ly8gQ29weXJpZ2h0IDIwMTcgR29vZ2xlIExMQwovLwovLyBMaWNlbnNlZCB1bmRlciB0aGUgQXBhY2hlIExpY2Vuc2UsIFZlcnNpb24gMi4wICh0aGUgIkxpY2Vuc2UiKTsKLy8geW91IG1heSBub3QgdXNlIHRoaXMgZmlsZSBleGNlcHQgaW4gY29tcGxpYW5jZSB3aXRoIHRoZSBMaWNlbnNlLgovLyBZb3UgbWF5IG9idGFpbiBhIGNvcHkgb2YgdGhlIExpY2Vuc2UgYXQKLy8KLy8gICAgICBodHRwOi8vd3d3LmFwYWNoZS5vcmcvbGljZW5zZXMvTElDRU5TRS0yLjAKLy8KLy8gVW5sZXNzIHJlcXVpcmVkIGJ5IGFwcGxpY2FibGUgbGF3IG9yIGFncmVlZCB0byBpbiB3cml0aW5nLCBzb2Z0d2FyZQovLyBkaXN0cmlidXRlZCB1bmRlciB0aGUgTGljZW5zZSBpcyBkaXN0cmlidXRlZCBvbiBhbiAiQVMgSVMiIEJBU0lTLAovLyBXSVRIT1VUIFdBUlJBTlRJRVMgT1IgQ09ORElUSU9OUyBPRiBBTlkgS0lORCwgZWl0aGVyIGV4cHJlc3Mgb3IgaW1wbGllZC4KLy8gU2VlIHRoZSBMaWNlbnNlIGZvciB0aGUgc3BlY2lmaWMgbGFuZ3VhZ2UgZ292ZXJuaW5nIHBlcm1pc3Npb25zIGFuZAovLyBsaW1pdGF0aW9ucyB1bmRlciB0aGUgTGljZW5zZS4KCi8qKgogKiBAZmlsZW92ZXJ2aWV3IEhlbHBlcnMgZm9yIGdvb2dsZS5jb2xhYiBQeXRob24gbW9kdWxlLgogKi8KKGZ1bmN0aW9uKHNjb3BlKSB7CmZ1bmN0aW9uIHNwYW4odGV4dCwgc3R5bGVBdHRyaWJ1dGVzID0ge30pIHsKICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnc3BhbicpOwogIGVsZW1lbnQudGV4dENvbnRlbnQgPSB0ZXh0OwogIGZvciAoY29uc3Qga2V5IG9mIE9iamVjdC5rZXlzKHN0eWxlQXR0cmlidXRlcykpIHsKICAgIGVsZW1lbnQuc3R5bGVba2V5XSA9IHN0eWxlQXR0cmlidXRlc1trZXldOwogIH0KICByZXR1cm4gZWxlbWVudDsKfQoKLy8gTWF4IG51bWJlciBvZiBieXRlcyB3aGljaCB3aWxsIGJlIHVwbG9hZGVkIGF0IGEgdGltZS4KY29uc3QgTUFYX1BBWUxPQURfU0laRSA9IDEwMCAqIDEwMjQ7CgpmdW5jdGlvbiBfdXBsb2FkRmlsZXMoaW5wdXRJZCwgb3V0cHV0SWQpIHsKICBjb25zdCBzdGVwcyA9IHVwbG9hZEZpbGVzU3RlcChpbnB1dElkLCBvdXRwdXRJZCk7CiAgY29uc3Qgb3V0cHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKG91dHB1dElkKTsKICAvLyBDYWNoZSBzdGVwcyBvbiB0aGUgb3V0cHV0RWxlbWVudCB0byBtYWtlIGl0IGF2YWlsYWJsZSBmb3IgdGhlIG5leHQgY2FsbAogIC8vIHRvIHVwbG9hZEZpbGVzQ29udGludWUgZnJvbSBQeXRob24uCiAgb3V0cHV0RWxlbWVudC5zdGVwcyA9IHN0ZXBzOwoKICByZXR1cm4gX3VwbG9hZEZpbGVzQ29udGludWUob3V0cHV0SWQpOwp9CgovLyBUaGlzIGlzIHJvdWdobHkgYW4gYXN5bmMgZ2VuZXJhdG9yIChub3Qgc3VwcG9ydGVkIGluIHRoZSBicm93c2VyIHlldCksCi8vIHdoZXJlIHRoZXJlIGFyZSBtdWx0aXBsZSBhc3luY2hyb25vdXMgc3RlcHMgYW5kIHRoZSB
QeXRob24gc2lkZSBpcyBnb2luZwovLyB0byBwb2xsIGZvciBjb21wbGV0aW9uIG9mIGVhY2ggc3RlcC4KLy8gVGhpcyB1c2VzIGEgUHJvbWlzZSB0byBibG9jayB0aGUgcHl0aG9uIHNpZGUgb24gY29tcGxldGlvbiBvZiBlYWNoIHN0ZXAsCi8vIHRoZW4gcGFzc2VzIHRoZSByZXN1bHQgb2YgdGhlIHByZXZpb3VzIHN0ZXAgYXMgdGhlIGlucHV0IHRvIHRoZSBuZXh0IHN0ZXAuCmZ1bmN0aW9uIF91cGxvYWRGaWxlc0NvbnRpbnVlKG91dHB1dElkKSB7CiAgY29uc3Qgb3V0cHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKG91dHB1dElkKTsKICBjb25zdCBzdGVwcyA9IG91dHB1dEVsZW1lbnQuc3RlcHM7CgogIGNvbnN0IG5leHQgPSBzdGVwcy5uZXh0KG91dHB1dEVsZW1lbnQubGFzdFByb21pc2VWYWx1ZSk7CiAgcmV0dXJuIFByb21pc2UucmVzb2x2ZShuZXh0LnZhbHVlLnByb21pc2UpLnRoZW4oKHZhbHVlKSA9PiB7CiAgICAvLyBDYWNoZSB0aGUgbGFzdCBwcm9taXNlIHZhbHVlIHRvIG1ha2UgaXQgYXZhaWxhYmxlIHRvIHRoZSBuZXh0CiAgICAvLyBzdGVwIG9mIHRoZSBnZW5lcmF0b3IuCiAgICBvdXRwdXRFbGVtZW50Lmxhc3RQcm9taXNlVmFsdWUgPSB2YWx1ZTsKICAgIHJldHVybiBuZXh0LnZhbHVlLnJlc3BvbnNlOwogIH0pOwp9CgovKioKICogR2VuZXJhdG9yIGZ1bmN0aW9uIHdoaWNoIGlzIGNhbGxlZCBiZXR3ZWVuIGVhY2ggYXN5bmMgc3RlcCBvZiB0aGUgdXBsb2FkCiAqIHByb2Nlc3MuCiAqIEBwYXJhbSB7c3RyaW5nfSBpbnB1dElkIEVsZW1lbnQgSUQgb2YgdGhlIGlucHV0IGZpbGUgcGlja2VyIGVsZW1lbnQuCiAqIEBwYXJhbSB7c3RyaW5nfSBvdXRwdXRJZCBFbGVtZW50IElEIG9mIHRoZSBvdXRwdXQgZGlzcGxheS4KICogQHJldHVybiB7IUl0ZXJhYmxlPCFPYmplY3Q+fSBJdGVyYWJsZSBvZiBuZXh0IHN0ZXBzLgogKi8KZnVuY3Rpb24qIHVwbG9hZEZpbGVzU3RlcChpbnB1dElkLCBvdXRwdXRJZCkgewogIGNvbnN0IGlucHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKGlucHV0SWQpOwogIGlucHV0RWxlbWVudC5kaXNhYmxlZCA9IGZhbHNlOwoKICBjb25zdCBvdXRwdXRFbGVtZW50ID0gZG9jdW1lbnQuZ2V0RWxlbWVudEJ5SWQob3V0cHV0SWQpOwogIG91dHB1dEVsZW1lbnQuaW5uZXJIVE1MID0gJyc7CgogIGNvbnN0IHBpY2tlZFByb21pc2UgPSBuZXcgUHJvbWlzZSgocmVzb2x2ZSkgPT4gewogICAgaW5wdXRFbGVtZW50LmFkZEV2ZW50TGlzdGVuZXIoJ2NoYW5nZScsIChlKSA9PiB7CiAgICAgIHJlc29sdmUoZS50YXJnZXQuZmlsZXMpOwogICAgfSk7CiAgfSk7CgogIGNvbnN0IGNhbmNlbCA9IGRvY3VtZW50LmNyZWF0ZUVsZW1lbnQoJ2J1dHRvbicpOwogIGlucHV0RWxlbWVudC5wYXJlbnRFbGVtZW50LmFwcGVuZENoaWxkKGNhbmNlbCk7CiAgY2FuY2VsLnRleHRDb250ZW50ID0gJ0NhbmNlbCB1cGxvYWQnOwogIGNvbnN0IGNhbmNlbFByb21pc2UgPSBuZXcgUHJvbWl
zZSgocmVzb2x2ZSkgPT4gewogICAgY2FuY2VsLm9uY2xpY2sgPSAoKSA9PiB7CiAgICAgIHJlc29sdmUobnVsbCk7CiAgICB9OwogIH0pOwoKICAvLyBXYWl0IGZvciB0aGUgdXNlciB0byBwaWNrIHRoZSBmaWxlcy4KICBjb25zdCBmaWxlcyA9IHlpZWxkIHsKICAgIHByb21pc2U6IFByb21pc2UucmFjZShbcGlja2VkUHJvbWlzZSwgY2FuY2VsUHJvbWlzZV0pLAogICAgcmVzcG9uc2U6IHsKICAgICAgYWN0aW9uOiAnc3RhcnRpbmcnLAogICAgfQogIH07CgogIGNhbmNlbC5yZW1vdmUoKTsKCiAgLy8gRGlzYWJsZSB0aGUgaW5wdXQgZWxlbWVudCBzaW5jZSBmdXJ0aGVyIHBpY2tzIGFyZSBub3QgYWxsb3dlZC4KICBpbnB1dEVsZW1lbnQuZGlzYWJsZWQgPSB0cnVlOwoKICBpZiAoIWZpbGVzKSB7CiAgICByZXR1cm4gewogICAgICByZXNwb25zZTogewogICAgICAgIGFjdGlvbjogJ2NvbXBsZXRlJywKICAgICAgfQogICAgfTsKICB9CgogIGZvciAoY29uc3QgZmlsZSBvZiBmaWxlcykgewogICAgY29uc3QgbGkgPSBkb2N1bWVudC5jcmVhdGVFbGVtZW50KCdsaScpOwogICAgbGkuYXBwZW5kKHNwYW4oZmlsZS5uYW1lLCB7Zm9udFdlaWdodDogJ2JvbGQnfSkpOwogICAgbGkuYXBwZW5kKHNwYW4oCiAgICAgICAgYCgke2ZpbGUudHlwZSB8fCAnbi9hJ30pIC0gJHtmaWxlLnNpemV9IGJ5dGVzLCBgICsKICAgICAgICBgbGFzdCBtb2RpZmllZDogJHsKICAgICAgICAgICAgZmlsZS5sYXN0TW9kaWZpZWREYXRlID8gZmlsZS5sYXN0TW9kaWZpZWREYXRlLnRvTG9jYWxlRGF0ZVN0cmluZygpIDoKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgJ24vYSd9IC0gYCkpOwogICAgY29uc3QgcGVyY2VudCA9IHNwYW4oJzAlIGRvbmUnKTsKICAgIGxpLmFwcGVuZENoaWxkKHBlcmNlbnQpOwoKICAgIG91dHB1dEVsZW1lbnQuYXBwZW5kQ2hpbGQobGkpOwoKICAgIGNvbnN0IGZpbGVEYXRhUHJvbWlzZSA9IG5ldyBQcm9taXNlKChyZXNvbHZlKSA9PiB7CiAgICAgIGNvbnN0IHJlYWRlciA9IG5ldyBGaWxlUmVhZGVyKCk7CiAgICAgIHJlYWRlci5vbmxvYWQgPSAoZSkgPT4gewogICAgICAgIHJlc29sdmUoZS50YXJnZXQucmVzdWx0KTsKICAgICAgfTsKICAgICAgcmVhZGVyLnJlYWRBc0FycmF5QnVmZmVyKGZpbGUpOwogICAgfSk7CiAgICAvLyBXYWl0IGZvciB0aGUgZGF0YSB0byBiZSByZWFkeS4KICAgIGxldCBmaWxlRGF0YSA9IHlpZWxkIHsKICAgICAgcHJvbWlzZTogZmlsZURhdGFQcm9taXNlLAogICAgICByZXNwb25zZTogewogICAgICAgIGFjdGlvbjogJ2NvbnRpbnVlJywKICAgICAgfQogICAgfTsKCiAgICAvLyBVc2UgYSBjaHVua2VkIHNlbmRpbmcgdG8gYXZvaWQgbWVzc2FnZSBzaXplIGxpbWl0cy4gU2VlIGIvNjIxMTU2NjAuCiAgICBsZXQgcG9zaXRpb24gPSAwOwogICAgd2hpbGUgKHBvc2l0aW9uIDwgZmlsZURhdGEuYnl0ZUxlbmd0aCkgewogICAgICBjb25zdCBsZW5ndGggPSBNYXRoLm1pbihmaWxlRGF0YS5
ieXRlTGVuZ3RoIC0gcG9zaXRpb24sIE1BWF9QQVlMT0FEX1NJWkUpOwogICAgICBjb25zdCBjaHVuayA9IG5ldyBVaW50OEFycmF5KGZpbGVEYXRhLCBwb3NpdGlvbiwgbGVuZ3RoKTsKICAgICAgcG9zaXRpb24gKz0gbGVuZ3RoOwoKICAgICAgY29uc3QgYmFzZTY0ID0gYnRvYShTdHJpbmcuZnJvbUNoYXJDb2RlLmFwcGx5KG51bGwsIGNodW5rKSk7CiAgICAgIHlpZWxkIHsKICAgICAgICByZXNwb25zZTogewogICAgICAgICAgYWN0aW9uOiAnYXBwZW5kJywKICAgICAgICAgIGZpbGU6IGZpbGUubmFtZSwKICAgICAgICAgIGRhdGE6IGJhc2U2NCwKICAgICAgICB9LAogICAgICB9OwogICAgICBwZXJjZW50LnRleHRDb250ZW50ID0KICAgICAgICAgIGAke01hdGgucm91bmQoKHBvc2l0aW9uIC8gZmlsZURhdGEuYnl0ZUxlbmd0aCkgKiAxMDApfSUgZG9uZWA7CiAgICB9CiAgfQoKICAvLyBBbGwgZG9uZS4KICB5aWVsZCB7CiAgICByZXNwb25zZTogewogICAgICBhY3Rpb246ICdjb21wbGV0ZScsCiAgICB9CiAgfTsKfQoKc2NvcGUuZ29vZ2xlID0gc2NvcGUuZ29vZ2xlIHx8IHt9OwpzY29wZS5nb29nbGUuY29sYWIgPSBzY29wZS5nb29nbGUuY29sYWIgfHwge307CnNjb3BlLmdvb2dsZS5jb2xhYi5fZmlsZXMgPSB7CiAgX3VwbG9hZEZpbGVzLAogIF91cGxvYWRGaWxlc0NvbnRpbnVlLAp9Owp9KShzZWxmKTsK",
"ok": true,
"headers": [
[
"content-type",
"application/javascript"
]
],
"status": 200,
"status_text": ""
}
},
"base_uri": "https://localhost:8080/",
"height": 73
}
},
"source": [
"from google.colab import files\n",
"uploaded = files.upload()"
],
"execution_count": null,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/html": [
"\n",
" \n",
" \n",
" "
],
"text/plain": [
""
]
},
"metadata": {
"tags": []
}
},
{
"output_type": "stream",
"text": [
"Saving KDDTrain.csv to KDDTrain.csv\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "nJb1IFUZ_rFU"
},
"source": [
"import io\n",
"training_df = pd.read_csv(io.BytesIO(uploaded['KDDTrain.csv']))"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "j_lxszVi_tdb",
"outputId": "0ee45ce2-cf62-40b2-e647-7e12b2943a8d",
"colab": {
"resources": {
"http://localhost:8080/nbextensions/google.colab/files.js": {
"data": "Ly8gQ29weXJpZ2h0IDIwMTcgR29vZ2xlIExMQwovLwovLyBMaWNlbnNlZCB1bmRlciB0aGUgQXBhY2hlIExpY2Vuc2UsIFZlcnNpb24gMi4wICh0aGUgIkxpY2Vuc2UiKTsKLy8geW91IG1heSBub3QgdXNlIHRoaXMgZmlsZSBleGNlcHQgaW4gY29tcGxpYW5jZSB3aXRoIHRoZSBMaWNlbnNlLgovLyBZb3UgbWF5IG9idGFpbiBhIGNvcHkgb2YgdGhlIExpY2Vuc2UgYXQKLy8KLy8gICAgICBodHRwOi8vd3d3LmFwYWNoZS5vcmcvbGljZW5zZXMvTElDRU5TRS0yLjAKLy8KLy8gVW5sZXNzIHJlcXVpcmVkIGJ5IGFwcGxpY2FibGUgbGF3IG9yIGFncmVlZCB0byBpbiB3cml0aW5nLCBzb2Z0d2FyZQovLyBkaXN0cmlidXRlZCB1bmRlciB0aGUgTGljZW5zZSBpcyBkaXN0cmlidXRlZCBvbiBhbiAiQVMgSVMiIEJBU0lTLAovLyBXSVRIT1VUIFdBUlJBTlRJRVMgT1IgQ09ORElUSU9OUyBPRiBBTlkgS0lORCwgZWl0aGVyIGV4cHJlc3Mgb3IgaW1wbGllZC4KLy8gU2VlIHRoZSBMaWNlbnNlIGZvciB0aGUgc3BlY2lmaWMgbGFuZ3VhZ2UgZ292ZXJuaW5nIHBlcm1pc3Npb25zIGFuZAovLyBsaW1pdGF0aW9ucyB1bmRlciB0aGUgTGljZW5zZS4KCi8qKgogKiBAZmlsZW92ZXJ2aWV3IEhlbHBlcnMgZm9yIGdvb2dsZS5jb2xhYiBQeXRob24gbW9kdWxlLgogKi8KKGZ1bmN0aW9uKHNjb3BlKSB7CmZ1bmN0aW9uIHNwYW4odGV4dCwgc3R5bGVBdHRyaWJ1dGVzID0ge30pIHsKICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnc3BhbicpOwogIGVsZW1lbnQudGV4dENvbnRlbnQgPSB0ZXh0OwogIGZvciAoY29uc3Qga2V5IG9mIE9iamVjdC5rZXlzKHN0eWxlQXR0cmlidXRlcykpIHsKICAgIGVsZW1lbnQuc3R5bGVba2V5XSA9IHN0eWxlQXR0cmlidXRlc1trZXldOwogIH0KICByZXR1cm4gZWxlbWVudDsKfQoKLy8gTWF4IG51bWJlciBvZiBieXRlcyB3aGljaCB3aWxsIGJlIHVwbG9hZGVkIGF0IGEgdGltZS4KY29uc3QgTUFYX1BBWUxPQURfU0laRSA9IDEwMCAqIDEwMjQ7CgpmdW5jdGlvbiBfdXBsb2FkRmlsZXMoaW5wdXRJZCwgb3V0cHV0SWQpIHsKICBjb25zdCBzdGVwcyA9IHVwbG9hZEZpbGVzU3RlcChpbnB1dElkLCBvdXRwdXRJZCk7CiAgY29uc3Qgb3V0cHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKG91dHB1dElkKTsKICAvLyBDYWNoZSBzdGVwcyBvbiB0aGUgb3V0cHV0RWxlbWVudCB0byBtYWtlIGl0IGF2YWlsYWJsZSBmb3IgdGhlIG5leHQgY2FsbAogIC8vIHRvIHVwbG9hZEZpbGVzQ29udGludWUgZnJvbSBQeXRob24uCiAgb3V0cHV0RWxlbWVudC5zdGVwcyA9IHN0ZXBzOwoKICByZXR1cm4gX3VwbG9hZEZpbGVzQ29udGludWUob3V0cHV0SWQpOwp9CgovLyBUaGlzIGlzIHJvdWdobHkgYW4gYXN5bmMgZ2VuZXJhdG9yIChub3Qgc3VwcG9ydGVkIGluIHRoZSBicm93c2VyIHlldCksCi8vIHdoZXJlIHRoZXJlIGFyZSBtdWx0aXBsZSBhc3luY2hyb25vdXMgc3RlcHMgYW5kIHRoZSB
QeXRob24gc2lkZSBpcyBnb2luZwovLyB0byBwb2xsIGZvciBjb21wbGV0aW9uIG9mIGVhY2ggc3RlcC4KLy8gVGhpcyB1c2VzIGEgUHJvbWlzZSB0byBibG9jayB0aGUgcHl0aG9uIHNpZGUgb24gY29tcGxldGlvbiBvZiBlYWNoIHN0ZXAsCi8vIHRoZW4gcGFzc2VzIHRoZSByZXN1bHQgb2YgdGhlIHByZXZpb3VzIHN0ZXAgYXMgdGhlIGlucHV0IHRvIHRoZSBuZXh0IHN0ZXAuCmZ1bmN0aW9uIF91cGxvYWRGaWxlc0NvbnRpbnVlKG91dHB1dElkKSB7CiAgY29uc3Qgb3V0cHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKG91dHB1dElkKTsKICBjb25zdCBzdGVwcyA9IG91dHB1dEVsZW1lbnQuc3RlcHM7CgogIGNvbnN0IG5leHQgPSBzdGVwcy5uZXh0KG91dHB1dEVsZW1lbnQubGFzdFByb21pc2VWYWx1ZSk7CiAgcmV0dXJuIFByb21pc2UucmVzb2x2ZShuZXh0LnZhbHVlLnByb21pc2UpLnRoZW4oKHZhbHVlKSA9PiB7CiAgICAvLyBDYWNoZSB0aGUgbGFzdCBwcm9taXNlIHZhbHVlIHRvIG1ha2UgaXQgYXZhaWxhYmxlIHRvIHRoZSBuZXh0CiAgICAvLyBzdGVwIG9mIHRoZSBnZW5lcmF0b3IuCiAgICBvdXRwdXRFbGVtZW50Lmxhc3RQcm9taXNlVmFsdWUgPSB2YWx1ZTsKICAgIHJldHVybiBuZXh0LnZhbHVlLnJlc3BvbnNlOwogIH0pOwp9CgovKioKICogR2VuZXJhdG9yIGZ1bmN0aW9uIHdoaWNoIGlzIGNhbGxlZCBiZXR3ZWVuIGVhY2ggYXN5bmMgc3RlcCBvZiB0aGUgdXBsb2FkCiAqIHByb2Nlc3MuCiAqIEBwYXJhbSB7c3RyaW5nfSBpbnB1dElkIEVsZW1lbnQgSUQgb2YgdGhlIGlucHV0IGZpbGUgcGlja2VyIGVsZW1lbnQuCiAqIEBwYXJhbSB7c3RyaW5nfSBvdXRwdXRJZCBFbGVtZW50IElEIG9mIHRoZSBvdXRwdXQgZGlzcGxheS4KICogQHJldHVybiB7IUl0ZXJhYmxlPCFPYmplY3Q+fSBJdGVyYWJsZSBvZiBuZXh0IHN0ZXBzLgogKi8KZnVuY3Rpb24qIHVwbG9hZEZpbGVzU3RlcChpbnB1dElkLCBvdXRwdXRJZCkgewogIGNvbnN0IGlucHV0RWxlbWVudCA9IGRvY3VtZW50LmdldEVsZW1lbnRCeUlkKGlucHV0SWQpOwogIGlucHV0RWxlbWVudC5kaXNhYmxlZCA9IGZhbHNlOwoKICBjb25zdCBvdXRwdXRFbGVtZW50ID0gZG9jdW1lbnQuZ2V0RWxlbWVudEJ5SWQob3V0cHV0SWQpOwogIG91dHB1dEVsZW1lbnQuaW5uZXJIVE1MID0gJyc7CgogIGNvbnN0IHBpY2tlZFByb21pc2UgPSBuZXcgUHJvbWlzZSgocmVzb2x2ZSkgPT4gewogICAgaW5wdXRFbGVtZW50LmFkZEV2ZW50TGlzdGVuZXIoJ2NoYW5nZScsIChlKSA9PiB7CiAgICAgIHJlc29sdmUoZS50YXJnZXQuZmlsZXMpOwogICAgfSk7CiAgfSk7CgogIGNvbnN0IGNhbmNlbCA9IGRvY3VtZW50LmNyZWF0ZUVsZW1lbnQoJ2J1dHRvbicpOwogIGlucHV0RWxlbWVudC5wYXJlbnRFbGVtZW50LmFwcGVuZENoaWxkKGNhbmNlbCk7CiAgY2FuY2VsLnRleHRDb250ZW50ID0gJ0NhbmNlbCB1cGxvYWQnOwogIGNvbnN0IGNhbmNlbFByb21pc2UgPSBuZXcgUHJvbWl
zZSgocmVzb2x2ZSkgPT4gewogICAgY2FuY2VsLm9uY2xpY2sgPSAoKSA9PiB7CiAgICAgIHJlc29sdmUobnVsbCk7CiAgICB9OwogIH0pOwoKICAvLyBXYWl0IGZvciB0aGUgdXNlciB0byBwaWNrIHRoZSBmaWxlcy4KICBjb25zdCBmaWxlcyA9IHlpZWxkIHsKICAgIHByb21pc2U6IFByb21pc2UucmFjZShbcGlja2VkUHJvbWlzZSwgY2FuY2VsUHJvbWlzZV0pLAogICAgcmVzcG9uc2U6IHsKICAgICAgYWN0aW9uOiAnc3RhcnRpbmcnLAogICAgfQogIH07CgogIGNhbmNlbC5yZW1vdmUoKTsKCiAgLy8gRGlzYWJsZSB0aGUgaW5wdXQgZWxlbWVudCBzaW5jZSBmdXJ0aGVyIHBpY2tzIGFyZSBub3QgYWxsb3dlZC4KICBpbnB1dEVsZW1lbnQuZGlzYWJsZWQgPSB0cnVlOwoKICBpZiAoIWZpbGVzKSB7CiAgICByZXR1cm4gewogICAgICByZXNwb25zZTogewogICAgICAgIGFjdGlvbjogJ2NvbXBsZXRlJywKICAgICAgfQogICAgfTsKICB9CgogIGZvciAoY29uc3QgZmlsZSBvZiBmaWxlcykgewogICAgY29uc3QgbGkgPSBkb2N1bWVudC5jcmVhdGVFbGVtZW50KCdsaScpOwogICAgbGkuYXBwZW5kKHNwYW4oZmlsZS5uYW1lLCB7Zm9udFdlaWdodDogJ2JvbGQnfSkpOwogICAgbGkuYXBwZW5kKHNwYW4oCiAgICAgICAgYCgke2ZpbGUudHlwZSB8fCAnbi9hJ30pIC0gJHtmaWxlLnNpemV9IGJ5dGVzLCBgICsKICAgICAgICBgbGFzdCBtb2RpZmllZDogJHsKICAgICAgICAgICAgZmlsZS5sYXN0TW9kaWZpZWREYXRlID8gZmlsZS5sYXN0TW9kaWZpZWREYXRlLnRvTG9jYWxlRGF0ZVN0cmluZygpIDoKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgJ24vYSd9IC0gYCkpOwogICAgY29uc3QgcGVyY2VudCA9IHNwYW4oJzAlIGRvbmUnKTsKICAgIGxpLmFwcGVuZENoaWxkKHBlcmNlbnQpOwoKICAgIG91dHB1dEVsZW1lbnQuYXBwZW5kQ2hpbGQobGkpOwoKICAgIGNvbnN0IGZpbGVEYXRhUHJvbWlzZSA9IG5ldyBQcm9taXNlKChyZXNvbHZlKSA9PiB7CiAgICAgIGNvbnN0IHJlYWRlciA9IG5ldyBGaWxlUmVhZGVyKCk7CiAgICAgIHJlYWRlci5vbmxvYWQgPSAoZSkgPT4gewogICAgICAgIHJlc29sdmUoZS50YXJnZXQucmVzdWx0KTsKICAgICAgfTsKICAgICAgcmVhZGVyLnJlYWRBc0FycmF5QnVmZmVyKGZpbGUpOwogICAgfSk7CiAgICAvLyBXYWl0IGZvciB0aGUgZGF0YSB0byBiZSByZWFkeS4KICAgIGxldCBmaWxlRGF0YSA9IHlpZWxkIHsKICAgICAgcHJvbWlzZTogZmlsZURhdGFQcm9taXNlLAogICAgICByZXNwb25zZTogewogICAgICAgIGFjdGlvbjogJ2NvbnRpbnVlJywKICAgICAgfQogICAgfTsKCiAgICAvLyBVc2UgYSBjaHVua2VkIHNlbmRpbmcgdG8gYXZvaWQgbWVzc2FnZSBzaXplIGxpbWl0cy4gU2VlIGIvNjIxMTU2NjAuCiAgICBsZXQgcG9zaXRpb24gPSAwOwogICAgd2hpbGUgKHBvc2l0aW9uIDwgZmlsZURhdGEuYnl0ZUxlbmd0aCkgewogICAgICBjb25zdCBsZW5ndGggPSBNYXRoLm1pbihmaWxlRGF0YS5
ieXRlTGVuZ3RoIC0gcG9zaXRpb24sIE1BWF9QQVlMT0FEX1NJWkUpOwogICAgICBjb25zdCBjaHVuayA9IG5ldyBVaW50OEFycmF5KGZpbGVEYXRhLCBwb3NpdGlvbiwgbGVuZ3RoKTsKICAgICAgcG9zaXRpb24gKz0gbGVuZ3RoOwoKICAgICAgY29uc3QgYmFzZTY0ID0gYnRvYShTdHJpbmcuZnJvbUNoYXJDb2RlLmFwcGx5KG51bGwsIGNodW5rKSk7CiAgICAgIHlpZWxkIHsKICAgICAgICByZXNwb25zZTogewogICAgICAgICAgYWN0aW9uOiAnYXBwZW5kJywKICAgICAgICAgIGZpbGU6IGZpbGUubmFtZSwKICAgICAgICAgIGRhdGE6IGJhc2U2NCwKICAgICAgICB9LAogICAgICB9OwogICAgICBwZXJjZW50LnRleHRDb250ZW50ID0KICAgICAgICAgIGAke01hdGgucm91bmQoKHBvc2l0aW9uIC8gZmlsZURhdGEuYnl0ZUxlbmd0aCkgKiAxMDApfSUgZG9uZWA7CiAgICB9CiAgfQoKICAvLyBBbGwgZG9uZS4KICB5aWVsZCB7CiAgICByZXNwb25zZTogewogICAgICBhY3Rpb246ICdjb21wbGV0ZScsCiAgICB9CiAgfTsKfQoKc2NvcGUuZ29vZ2xlID0gc2NvcGUuZ29vZ2xlIHx8IHt9OwpzY29wZS5nb29nbGUuY29sYWIgPSBzY29wZS5nb29nbGUuY29sYWIgfHwge307CnNjb3BlLmdvb2dsZS5jb2xhYi5fZmlsZXMgPSB7CiAgX3VwbG9hZEZpbGVzLAogIF91cGxvYWRGaWxlc0NvbnRpbnVlLAp9Owp9KShzZWxmKTsK",
"ok": true,
"headers": [
[
"content-type",
"application/javascript"
]
],
"status": 200,
"status_text": ""
}
},
"base_uri": "https://localhost:8080/",
"height": 73
}
},
"source": [
"uploaded = files.upload()"
],
"execution_count": null,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/html": [
"\n",
" \n",
" \n",
" "
],
"text/plain": [
""
]
},
"metadata": {
"tags": []
}
},
{
"output_type": "stream",
"text": [
"Saving KDDTest.csv to KDDTest.csv\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "Rdvg0Sdx_1M0"
},
"source": [
"testing_df = pd.read_csv(io.BytesIO(uploaded['KDDTest.csv']))"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "klI0nw7y_7nr"
},
"source": [
"training_dfn=training_df\n",
"testing_dfn=testing_df"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "9MVHB6MD_-05"
},
"source": [
"training_dfn.replace('?', np.nan, inplace=True)\n",
"training_dfn.dropna(inplace=True)\n",
"testing_dfn.replace('?', np.nan, inplace=True)\n",
"testing_dfn.dropna(inplace=True)"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "Nr7Qs-K6ACEF",
"outputId": "d3983bff-6068-482e-a395-e9384a857fda",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 223
}
},
"source": [
"testing_dfn.head()\n"
],
"execution_count": null,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/html": [
"\n",
"\n",
"
\n",
" \n",
" \n",
" | \n",
" duration | \n",
" protocol_type | \n",
" service | \n",
" flag | \n",
" src_bytes | \n",
" dst_bytes | \n",
" land | \n",
" wrong_fragment | \n",
" urgent | \n",
" hot | \n",
" num_failed_logins | \n",
" logged_in | \n",
" num_compromised | \n",
" root_shell | \n",
" su_attempted | \n",
" num_root | \n",
" num_file_creations | \n",
" num_shells | \n",
" num_access_files | \n",
" num_outbound_cmds | \n",
" is_host_login | \n",
" is_guest_login | \n",
" count | \n",
" srv_count | \n",
" serror_rate | \n",
" srv_serror_rate | \n",
" rerror_rate | \n",
" srv_rerror_rate | \n",
" same_srv_rate | \n",
" diff_srv_rate | \n",
" srv_diff_host_rate | \n",
" dst_host_count | \n",
" dst_host_srv_count | \n",
" dst_host_same_srv_rate | \n",
" dst_host_diff_srv_rate | \n",
" dst_host_same_src_port_rate | \n",
" dst_host_srv_diff_host_rate | \n",
" dst_host_serror_rate | \n",
" dst_host_srv_serror_rate | \n",
" dst_host_rerror_rate | \n",
" dst_host_srv_rerror_rate | \n",
" xAttack | \n",
"
\n",
" \n",
" \n",
" \n",
" 0 | \n",
" 0 | \n",
" 1 | \n",
" 50 | \n",
" 1 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 229 | \n",
" 10 | \n",
" 0.0 | \n",
" 0.00 | \n",
" 1.0 | \n",
" 1.0 | \n",
" 0.04 | \n",
" 0.06 | \n",
" 0.00 | \n",
" 255 | \n",
" 10 | \n",
" 0.04 | \n",
" 0.06 | \n",
" 0.00 | \n",
" 0.00 | \n",
" 0.0 | \n",
" 0.0 | \n",
" 1.00 | \n",
" 1.00 | \n",
" 1 | \n",
"
\n",
" \n",
" 1 | \n",
" 0 | \n",
" 1 | \n",
" 50 | \n",
" 1 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 136 | \n",
" 1 | \n",
" 0.0 | \n",
" 0.00 | \n",
" 1.0 | \n",
" 1.0 | \n",
" 0.01 | \n",
" 0.06 | \n",
" 0.00 | \n",
" 255 | \n",
" 1 | \n",
" 0.00 | \n",
" 0.06 | \n",
" 0.00 | \n",
" 0.00 | \n",
" 0.0 | \n",
" 0.0 | \n",
" 1.00 | \n",
" 1.00 | \n",
" 1 | \n",
"
\n",
" \n",
" 2 | \n",
" 2 | \n",
" 1 | \n",
" 20 | \n",
" 2 | \n",
" 12983 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 1 | \n",
" 1 | \n",
" 0.0 | \n",
" 0.00 | \n",
" 0.0 | \n",
" 0.0 | \n",
" 1.00 | \n",
" 0.00 | \n",
" 0.00 | \n",
" 134 | \n",
" 86 | \n",
" 0.61 | \n",
" 0.04 | \n",
" 0.61 | \n",
" 0.02 | \n",
" 0.0 | \n",
" 0.0 | \n",
" 0.00 | \n",
" 0.00 | \n",
" 0 | \n",
"
\n",
" \n",
" 3 | \n",
" 0 | \n",
" 2 | \n",
" 15 | \n",
" 2 | \n",
" 20 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 1 | \n",
" 65 | \n",
" 0.0 | \n",
" 0.00 | \n",
" 0.0 | \n",
" 0.0 | \n",
" 1.00 | \n",
" 0.00 | \n",
" 1.00 | \n",
" 3 | \n",
" 57 | \n",
" 1.00 | \n",
" 0.00 | \n",
" 1.00 | \n",
" 0.28 | \n",
" 0.0 | \n",
" 0.0 | \n",
" 0.00 | \n",
" 0.00 | \n",
" 1 | \n",
"
\n",
" \n",
" 4 | \n",
" 1 | \n",
" 1 | \n",
" 61 | \n",
" 3 | \n",
" 0 | \n",
" 15 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 0 | \n",
" 1 | \n",
" 8 | \n",
" 0.0 | \n",
" 0.12 | \n",
" 1.0 | \n",
" 0.5 | \n",
" 1.00 | \n",
" 0.00 | \n",
" 0.75 | \n",
" 29 | \n",
" 86 | \n",
" 0.31 | \n",
" 0.17 | \n",
" 0.03 | \n",
" 0.02 | \n",
" 0.0 | \n",
" 0.0 | \n",
" 0.83 | \n",
" 0.71 | \n",
" 1 | \n",
"
\n",
" \n",
"
\n",
"
"
],
"text/plain": [
" duration protocol_type ... dst_host_srv_rerror_rate xAttack\n",
"0 0 1 ... 1.00 1\n",
"1 0 1 ... 1.00 1\n",
"2 2 1 ... 0.00 0\n",
"3 0 2 ... 0.00 1\n",
"4 1 1 ... 0.71 1\n",
"\n",
"[5 rows x 42 columns]"
]
},
"metadata": {
"tags": []
},
"execution_count": 16
}
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "VZ0dpWz6FLyc"
},
"source": [
""
]
},
{
"cell_type": "code",
"metadata": {
"id": "mEdi-fmMAEiA"
},
"source": [
"x,Y=training_dfn,training_dfn.pop(\"xAttack\").values\n",
"X=x.values\n",
"x_test,C=testing_dfn,testing_dfn.pop(\"xAttack\").values\n",
"T=x_test.values"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "yo-vktbSAJJn"
},
"source": [
"from sklearn.preprocessing import StandardScaler, MinMaxScaler\n",
"scaler = MinMaxScaler()\n",
"scaler.fit(X)\n",
"trainX = scaler.transform(X)\n",
"scaler.fit(T)\n",
"testT = scaler.transform(T)"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "0PljJYomALhJ"
},
"source": [
"y0=np.ones(len(Y),np.int8)\n",
"y0[np.where(Y==0)]=0\n",
"y0_test=np.ones(len(C),np.int8)\n",
"y0_test[np.where(C==0)]=0"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "Yw98w8aNm5qb"
},
"source": [
"###########################################################Stacked Autencoder ##################################################3"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "g6wk3fBqYy13"
},
"source": [
"# Stacked Autoencoder \n",
"from keras import models\n",
"from keras import layers\n",
"def create_stack_network():\n",
" inputs = Input(shape=(41,))\n",
" encoded = Dense(41, activation='tanh')(inputs)\n",
" encoded = Dense(32, activation='tanh')(encoded)\n",
" encoded = Dense(24, activation='tanh')(encoded)\n",
" \n",
" encoded = Dense(16, activation='tanh')(encoded)\n",
"\n",
" decoded = Dense(24, activation='tanh')(encoded)\n",
" decoded = Dense(32, activation='tanh')(decoded)\n",
" decoded = Dense(41, activation='tanh')(decoded)\n",
" autoencoder=Model(inputs,decoded)\n",
" autoencoder.compile(optimizer='adam',loss='mean_squared_error')\n",
" return autoencoder"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "x3bNT2XRnuIP",
"outputId": "ed04e526-31d9-4fd5-f8e6-17898e6f7e1b",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
}
},
"source": [
"from sklearn.model_selection import KFold\n",
"\n",
"n_split=3\n",
"cvscores = []\n",
"hist=[]\n",
"NtrainX=trainX[np.where(y0==0)]\n",
"for train_index,test_index in KFold(n_split).split(NtrainX):\n",
" x_train,x_test=NtrainX[train_index],NtrainX[test_index]\n",
" model=create_stack_network()\n",
" history=model.fit(x_train, x_train,validation_data=[testT,testT], epochs=20,batch_size=128)\n",
" hist.append(history)\n",
" scores = model.evaluate(testT, testT)\n",
" cvscores.append(scores * 100)\n",
" print(\". : %.2f%%\" % (scores*100))"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0162 - val_loss: 0.0241\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0042 - val_loss: 0.0161\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0025 - val_loss: 0.0129\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0019 - val_loss: 0.0120\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0016 - val_loss: 0.0112\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0015 - val_loss: 0.0107\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 0.0014 - val_loss: 0.0104\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0013 - val_loss: 0.0111\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0012 - val_loss: 0.0117\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 0.0011 - val_loss: 0.0116\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0010 - val_loss: 0.0110\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 9.4649e-04 - val_loss: 0.0108\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 8.9990e-04 - val_loss: 0.0107\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 8.6715e-04 - val_loss: 0.0100\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 8.3756e-04 - val_loss: 0.0097\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 8.1255e-04 - val_loss: 0.0094\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 7.8970e-04 - val_loss: 0.0091\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 7.6930e-04 - val_loss: 0.0086\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 7.4665e-04 - val_loss: 0.0085\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 7.2720e-04 - val_loss: 0.0082\n",
". : 0.82%\n",
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0147 - val_loss: 0.0206\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0037 - val_loss: 0.0149\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0025 - val_loss: 0.0134\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0020 - val_loss: 0.0131\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 0.0017 - val_loss: 0.0125\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0015 - val_loss: 0.0124\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0013 - val_loss: 0.0118\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0012 - val_loss: 0.0123\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 0.0011 - val_loss: 0.0118\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0010 - val_loss: 0.0111\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 9.3696e-04 - val_loss: 0.0099\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 8.8023e-04 - val_loss: 0.0089\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 17us/sample - loss: 8.3932e-04 - val_loss: 0.0083\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 8.0344e-04 - val_loss: 0.0080\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 7.7349e-04 - val_loss: 0.0081\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 7.4523e-04 - val_loss: 0.0073\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 7.1704e-04 - val_loss: 0.0074\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 6.9160e-04 - val_loss: 0.0071\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 6.6983e-04 - val_loss: 0.0071\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 6.4822e-04 - val_loss: 0.0069\n",
". : 0.69%\n",
"Train on 44896 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44896/44896 [==============================] - 1s 24us/sample - loss: 0.0186 - val_loss: 0.0252\n",
"Epoch 2/20\n",
"44896/44896 [==============================] - 1s 23us/sample - loss: 0.0047 - val_loss: 0.0177\n",
"Epoch 3/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0030 - val_loss: 0.0140\n",
"Epoch 4/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0022 - val_loss: 0.0111\n",
"Epoch 5/20\n",
"44896/44896 [==============================] - 1s 24us/sample - loss: 0.0018 - val_loss: 0.0106\n",
"Epoch 6/20\n",
"44896/44896 [==============================] - 1s 23us/sample - loss: 0.0016 - val_loss: 0.0098\n",
"Epoch 7/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 0.0014 - val_loss: 0.0095\n",
"Epoch 8/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 0.0012 - val_loss: 0.0090\n",
"Epoch 9/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 0.0011 - val_loss: 0.0084\n",
"Epoch 10/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 0.0010 - val_loss: 0.0081\n",
"Epoch 11/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 9.7447e-04 - val_loss: 0.0077\n",
"Epoch 12/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 9.2274e-04 - val_loss: 0.0074\n",
"Epoch 13/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 8.8481e-04 - val_loss: 0.0078\n",
"Epoch 14/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 8.4194e-04 - val_loss: 0.0080\n",
"Epoch 15/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 8.1095e-04 - val_loss: 0.0081\n",
"Epoch 16/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 7.7564e-04 - val_loss: 0.0083\n",
"Epoch 17/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 7.4401e-04 - val_loss: 0.0083\n",
"Epoch 18/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 6.9965e-04 - val_loss: 0.0082\n",
"Epoch 19/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 6.6332e-04 - val_loss: 0.0078\n",
"Epoch 20/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 6.3278e-04 - val_loss: 0.0084\n",
". : 0.84%\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "sSbLoYIwnAx-"
},
"source": [
"####################################################Sparse Autoencocer ###################################################3"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "a9lbZsdGklJF"
},
"source": [
"# Sparse Autoencoder\n",
"from keras import models\n",
"from keras import layers\n",
"from keras import regularizers\n",
"\n",
"def create_sparse_network():\n",
"    \"\"\"Build a 41-32-24-16-24-32-41 sparse autoencoder.\n",
"\n",
"    Sparsity is imposed via L1 activity regularizers on the encoder\n",
"    layers only (note: 10e-6 == 1e-5 and 10e-5 == 1e-4).\n",
"    \"\"\"\n",
"    # (units, l1 strength) for each encoder layer, outermost first\n",
"    encoder_spec = [(41, 10e-6), (32, 10e-6), (24, 10e-5), (16, 10e-5)]\n",
"\n",
"    inputs = Input(shape=(41,))\n",
"    x = inputs\n",
"    for units, l1 in encoder_spec:\n",
"        x = Dense(units, activation='tanh', activity_regularizer=regularizers.l1(l1))(x)\n",
"\n",
"    # mirror-image decoder, no sparsity penalty\n",
"    for units in (24, 32, 41):\n",
"        x = Dense(units, activation='tanh')(x)\n",
"\n",
"    autoencoder = Model(inputs, x)\n",
"    autoencoder.compile(optimizer='adam', loss='mean_squared_error')\n",
"    return autoencoder"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "HeHyIRMpGqST",
"outputId": "b65afcba-ae96-446a-9159-a9d3279358ce",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
}
},
"source": [
"from sklearn.model_selection import KFold\n",
"\n",
"# 3-fold CV of the sparse autoencoder, trained on normal-class rows only\n",
"n_split=3\n",
"cvscores = []\n",
"hist=[]\n",
"NtrainX=trainX[np.where(y0==0)]  # keep only class-0 (normal) samples\n",
"\n",
"for fold_train_idx, fold_val_idx in KFold(n_split).split(NtrainX):\n",
"    x_train, x_test = NtrainX[fold_train_idx], NtrainX[fold_val_idx]\n",
"    model = create_sparse_network()  # fresh weights every fold\n",
"    history = model.fit(x_train, x_train, validation_data=[testT, testT], epochs=20, batch_size=128)\n",
"    hist.append(history)\n",
"    scores = model.evaluate(testT, testT)  # reconstruction MSE on the held-out test set\n",
"    cvscores.append(scores * 100)\n",
"    print(\". : %.2f%%\" % (scores*100))\n"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 1s 28us/sample - loss: 0.0180 - val_loss: 0.0276\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 26us/sample - loss: 0.0051 - val_loss: 0.0195\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0035 - val_loss: 0.0156\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0028 - val_loss: 0.0128\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0024 - val_loss: 0.0106\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0021 - val_loss: 0.0094\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0019 - val_loss: 0.0085\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0018 - val_loss: 0.0085\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0017 - val_loss: 0.0079\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0015 - val_loss: 0.0077\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0015 - val_loss: 0.0077\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0014 - val_loss: 0.0078\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0013 - val_loss: 0.0076\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0013 - val_loss: 0.0079\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 21us/sample - loss: 0.0012 - val_loss: 0.0073\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0012 - val_loss: 0.0078\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0011 - val_loss: 0.0074\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0073\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0074\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0010 - val_loss: 0.0076\n",
". : 0.76%\n",
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0196 - val_loss: 0.0250\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 21us/sample - loss: 0.0050 - val_loss: 0.0187\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0036 - val_loss: 0.0157\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0029 - val_loss: 0.0119\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0024 - val_loss: 0.0104\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0021 - val_loss: 0.0100\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0019 - val_loss: 0.0093\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0017 - val_loss: 0.0091\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0016 - val_loss: 0.0082\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 21us/sample - loss: 0.0015 - val_loss: 0.0078\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0014 - val_loss: 0.0082\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 22us/sample - loss: 0.0014 - val_loss: 0.0079\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0013 - val_loss: 0.0076\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 21us/sample - loss: 0.0013 - val_loss: 0.0077\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0013 - val_loss: 0.0075\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0012 - val_loss: 0.0080\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0012 - val_loss: 0.0076\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0012 - val_loss: 0.0073\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 21us/sample - loss: 0.0011 - val_loss: 0.0078\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0071\n",
". : 0.71%\n",
"Train on 44896 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0194 - val_loss: 0.0237\n",
"Epoch 2/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 0.0049 - val_loss: 0.0162\n",
"Epoch 3/20\n",
"44896/44896 [==============================] - 1s 22us/sample - loss: 0.0032 - val_loss: 0.0118\n",
"Epoch 4/20\n",
"44896/44896 [==============================] - 1s 21us/sample - loss: 0.0025 - val_loss: 0.0108\n",
"Epoch 5/20\n",
"44896/44896 [==============================] - 1s 22us/sample - loss: 0.0021 - val_loss: 0.0091\n",
"Epoch 6/20\n",
"44896/44896 [==============================] - 1s 22us/sample - loss: 0.0018 - val_loss: 0.0086\n",
"Epoch 7/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 0.0017 - val_loss: 0.0079\n",
"Epoch 8/20\n",
"44896/44896 [==============================] - 1s 21us/sample - loss: 0.0016 - val_loss: 0.0075\n",
"Epoch 9/20\n",
"44896/44896 [==============================] - 1s 21us/sample - loss: 0.0015 - val_loss: 0.0075\n",
"Epoch 10/20\n",
"44896/44896 [==============================] - 1s 21us/sample - loss: 0.0015 - val_loss: 0.0077\n",
"Epoch 11/20\n",
"44896/44896 [==============================] - 1s 21us/sample - loss: 0.0014 - val_loss: 0.0072\n",
"Epoch 12/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 0.0013 - val_loss: 0.0065\n",
"Epoch 13/20\n",
"44896/44896 [==============================] - 1s 22us/sample - loss: 0.0012 - val_loss: 0.0064\n",
"Epoch 14/20\n",
"44896/44896 [==============================] - 1s 21us/sample - loss: 0.0012 - val_loss: 0.0066\n",
"Epoch 15/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0066\n",
"Epoch 16/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0063\n",
"Epoch 17/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0066\n",
"Epoch 18/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0065\n",
"Epoch 19/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0011 - val_loss: 0.0073\n",
"Epoch 20/20\n",
"44896/44896 [==============================] - 1s 26us/sample - loss: 0.0010 - val_loss: 0.0068\n",
". : 0.68%\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "sAYo5juOnO_N"
},
"source": [
"######################################################Denoising Autoencoder###############################################"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "fy3wz1xP1d6Z"
},
"source": [
"# Denoising Autoencoder: corrupt inputs with additive Gaussian noise so the\n",
"# model learns to map noisy samples back to the clean originals.\n",
"# NOTE(review): no RNG seed is set, so the noise differs run to run — results\n",
"# are not exactly reproducible; confirm whether that is acceptable.\n",
"noise_factor = 0.1\n",
"x_train_noisy = trainX + noise_factor * np.random.normal(loc=0.0, scale=1.0, size=trainX.shape)\n",
"x_test_noisy = testT + noise_factor * np.random.normal(loc=0.0, scale=1.0, size=testT.shape)\n",
"# features were min-max scaled, so clamp noisy values back into [0, 1]\n",
"x_train_noisy = np.clip(x_train_noisy, 0., 1.)\n",
"x_test_noisy = np.clip(x_test_noisy, 0., 1.)\n"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "9R5kMTv51woF",
"outputId": "1579d8b3-e83a-48d6-c48f-d91e5105edb1",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
}
},
"source": [
"from sklearn.model_selection import KFold\n",
"\n",
"# 3-fold CV of the denoising setup: train on noisy normal-class rows with\n",
"# the clean versions as targets (reuses the stacked-AE architecture).\n",
"n_split=3\n",
"cvscores = []\n",
"hist=[]\n",
"NtrainX=trainX[np.where(y0==0)]  # clean normal-class rows\n",
"NtrainX_noisy=x_train_noisy[np.where(y0==0)]  # their noisy counterparts (same row order)\n",
"\n",
"for fold_train_idx, fold_val_idx in KFold(n_split).split(NtrainX):\n",
"    x_train, x_test = NtrainX[fold_train_idx], NtrainX[fold_val_idx]\n",
"    x_train_noisy2 = NtrainX_noisy[fold_train_idx]  # noisy inputs aligned with x_train\n",
"    model = create_stack_network()  # fresh weights each fold\n",
"    history = model.fit(x_train_noisy2, x_train, validation_data=[x_test_noisy, testT], epochs=20, batch_size=128)\n",
"    hist.append(history)\n",
"    scores = model.evaluate(testT, testT)  # clean-input reconstruction MSE\n",
"    cvscores.append(scores * 100)\n",
"    print(\". : %.2f%%\" % (scores*100))"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 2s 34us/sample - loss: 0.0182 - val_loss: 0.0282\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 26us/sample - loss: 0.0058 - val_loss: 0.0193\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0042 - val_loss: 0.0153\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0037 - val_loss: 0.0139\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0033 - val_loss: 0.0133\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0030 - val_loss: 0.0128\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0028 - val_loss: 0.0124\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0026 - val_loss: 0.0119\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0024 - val_loss: 0.0117\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0023 - val_loss: 0.0115\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0022 - val_loss: 0.0112\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0020 - val_loss: 0.0107\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0020 - val_loss: 0.0102\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0019 - val_loss: 0.0097\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 26us/sample - loss: 0.0018 - val_loss: 0.0100\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0018 - val_loss: 0.0094\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0017 - val_loss: 0.0092\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0017 - val_loss: 0.0088\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0017 - val_loss: 0.0092\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0016 - val_loss: 0.0086\n",
". : 0.79%\n",
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 1s 32us/sample - loss: 0.0174 - val_loss: 0.0251\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0052 - val_loss: 0.0179\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0039 - val_loss: 0.0141\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0033 - val_loss: 0.0133\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0030 - val_loss: 0.0134\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0027 - val_loss: 0.0131\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 26us/sample - loss: 0.0026 - val_loss: 0.0130\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0024 - val_loss: 0.0135\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0023 - val_loss: 0.0128\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0022 - val_loss: 0.0126\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0021 - val_loss: 0.0123\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0020 - val_loss: 0.0118\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0019 - val_loss: 0.0114\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 23us/sample - loss: 0.0018 - val_loss: 0.0110\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0018 - val_loss: 0.0108\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0017 - val_loss: 0.0104\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0017 - val_loss: 0.0101\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 24us/sample - loss: 0.0016 - val_loss: 0.0100\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0016 - val_loss: 0.0096\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 25us/sample - loss: 0.0016 - val_loss: 0.0094\n",
". : 0.86%\n",
"Train on 44896 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44896/44896 [==============================] - 2s 35us/sample - loss: 0.0195 - val_loss: 0.0308\n",
"Epoch 2/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0060 - val_loss: 0.0218\n",
"Epoch 3/20\n",
"44896/44896 [==============================] - 1s 26us/sample - loss: 0.0044 - val_loss: 0.0173\n",
"Epoch 4/20\n",
"44896/44896 [==============================] - 1s 26us/sample - loss: 0.0037 - val_loss: 0.0163\n",
"Epoch 5/20\n",
"44896/44896 [==============================] - 1s 24us/sample - loss: 0.0033 - val_loss: 0.0159\n",
"Epoch 6/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0030 - val_loss: 0.0160\n",
"Epoch 7/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0027 - val_loss: 0.0162\n",
"Epoch 8/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0026 - val_loss: 0.0147\n",
"Epoch 9/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0024 - val_loss: 0.0140\n",
"Epoch 10/20\n",
"44896/44896 [==============================] - 1s 27us/sample - loss: 0.0023 - val_loss: 0.0130\n",
"Epoch 11/20\n",
"44896/44896 [==============================] - 1s 26us/sample - loss: 0.0022 - val_loss: 0.0117\n",
"Epoch 12/20\n",
"44896/44896 [==============================] - 1s 26us/sample - loss: 0.0021 - val_loss: 0.0118\n",
"Epoch 13/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0020 - val_loss: 0.0113\n",
"Epoch 14/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0019 - val_loss: 0.0109\n",
"Epoch 15/20\n",
"44896/44896 [==============================] - 1s 26us/sample - loss: 0.0018 - val_loss: 0.0105\n",
"Epoch 16/20\n",
"44896/44896 [==============================] - 1s 25us/sample - loss: 0.0018 - val_loss: 0.0106\n",
"Epoch 17/20\n",
"44896/44896 [==============================] - 1s 27us/sample - loss: 0.0017 - val_loss: 0.0100\n",
"Epoch 18/20\n",
"44896/44896 [==============================] - 1s 27us/sample - loss: 0.0017 - val_loss: 0.0098\n",
"Epoch 19/20\n",
"44896/44896 [==============================] - 1s 31us/sample - loss: 0.0016 - val_loss: 0.0095\n",
"Epoch 20/20\n",
"44896/44896 [==============================] - 1s 32us/sample - loss: 0.0016 - val_loss: 0.0087\n",
". : 0.80%\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "AzzTxniSPZsq"
},
"source": [
"#####################Contractive Autoencoder ########################"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "jNaXx8kr9iGm",
"outputId": "d29802a0-e588-4f81-9317-ba14b1c49177",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
}
},
"source": [
"# Contractive Autoencoder\n",
"from keras.layers import Input, Dense\n",
"from keras.models import Model\n",
"import tensorflow.keras.backend as K\n",
"import tensorflow_probability as tfp\n",
"from sklearn.model_selection import KFold\n",
"\n",
"lam = 1e-5  # weight of the contractive (Jacobian) penalty\n",
"\n",
"# The symbolic loss below relies on TF1 graph mode.\n",
"# (Vestigial v1 Session/global_variables_initializer boilerplate removed:\n",
"# it ran before the model existed and initialized nothing.)\n",
"tf.compat.v1.disable_eager_execution()\n",
"\n",
"tfd = tfp.distributions  # NOTE(review): unused in this cell; kept in case later cells rely on it\n",
"\n",
"def create_contractive_network():\n",
"    \"\"\"Build and compile a fresh 41-32-24-16 contractive autoencoder.\n",
"\n",
"    Call once per CV fold so each fold starts from untrained weights.\n",
"    The original built one model outside the loop and kept training it\n",
"    across folds (visible in the logs: fold 2 starts at the loss fold 1\n",
"    ended with), which invalidated the cross-validation.\n",
"    \"\"\"\n",
"    inputs = Input(shape=(41,))\n",
"    encoded = Dense(41, activation='tanh')(inputs)\n",
"    encoded = Dense(32, activation='tanh')(encoded)\n",
"    encoded = Dense(24, activation='tanh')(encoded)\n",
"    encoded = Dense(16, activation='tanh', name='encoded')(encoded)\n",
"\n",
"    decoded = Dense(24, activation='tanh')(encoded)\n",
"    decoded = Dense(32, activation='tanh')(decoded)\n",
"    decoded = Dense(41, activation='tanh')(decoded)\n",
"    autoencoder = Model(inputs, decoded)\n",
"\n",
"    def contractive_loss(y_true, y_pred):\n",
"        # Keras invokes loss(y_true, y_pred); the original declared the\n",
"        # parameters in the opposite order (harmless for symmetric MSE,\n",
"        # but misleading).\n",
"        mse = K.mean(K.square(y_true - y_pred), axis=1)\n",
"\n",
"        # Use the layer's live kernel tensor so the penalty tracks the\n",
"        # weights as they train; K.variable(get_weights()[0]) baked in a\n",
"        # frozen snapshot of the initial weights.\n",
"        W = K.transpose(autoencoder.get_layer('encoded').kernel)  # N_hidden x N\n",
"        h = autoencoder.get_layer('encoded').output\n",
"        dh = 1 - h**2  # derivative of tanh; the original's h*(1-h) is the sigmoid derivative\n",
"\n",
"        # per-sample penalty: sum_j (dh_j)^2 * ||W_j||^2  -> shape (N_batch,)\n",
"        contractive = lam * K.sum(dh**2 * K.sum(W**2, axis=1), axis=1)\n",
"        return mse + contractive\n",
"\n",
"    autoencoder.compile(optimizer='adam', loss=contractive_loss)\n",
"    return autoencoder\n",
"\n",
"n_split=3\n",
"cvscores = []\n",
"hist=[]\n",
"\n",
"NtrainX=trainX[np.where(y0==0)]  # train on normal-class rows only\n",
"\n",
"for train_index,test_index in KFold(n_split).split(NtrainX):\n",
"    x_train,x_test=NtrainX[train_index],NtrainX[test_index]\n",
"    model=create_contractive_network()  # fresh model per fold\n",
"    history=model.fit(x_train, x_train,validation_data=[testT,testT],epochs=20,batch_size=128)\n",
"    hist.append(history)\n",
"    scores = model.evaluate(testT, testT)\n",
"    cvscores.append(scores * 100)\n",
"    print(\". : %.2f%%\" % (scores*100))"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - ETA: 0s - loss: 0.0185WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training_v1.py:2048: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n",
"Instructions for updating:\n",
"This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0185 - val_loss: 0.0243\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0048 - val_loss: 0.0161\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0028 - val_loss: 0.0130\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0021 - val_loss: 0.0107\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0017 - val_loss: 0.0095\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0015 - val_loss: 0.0085\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0014 - val_loss: 0.0079\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 0.0013 - val_loss: 0.0079\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0012 - val_loss: 0.0077\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 0.0011 - val_loss: 0.0074\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 0.0010 - val_loss: 0.0075\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 9.5382e-04 - val_loss: 0.0077\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 9.1406e-04 - val_loss: 0.0073\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 8.7225e-04 - val_loss: 0.0070\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 8.3753e-04 - val_loss: 0.0073\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 8.0632e-04 - val_loss: 0.0065\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 7.8257e-04 - val_loss: 0.0067\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 7.5754e-04 - val_loss: 0.0066\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 7.3583e-04 - val_loss: 0.0062\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 7.1889e-04 - val_loss: 0.0063\n",
". : 0.63%\n",
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 7.0590e-04 - val_loss: 0.0062\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 6.9019e-04 - val_loss: 0.0064\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 6.7218e-04 - val_loss: 0.0066\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 1s 21us/sample - loss: 6.5908e-04 - val_loss: 0.0063\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 6.4665e-04 - val_loss: 0.0067\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 6.2692e-04 - val_loss: 0.0065\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 6.1406e-04 - val_loss: 0.0065\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 6.0290e-04 - val_loss: 0.0064\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 5.9214e-04 - val_loss: 0.0063\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 5.7491e-04 - val_loss: 0.0068\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 5.6557e-04 - val_loss: 0.0066\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 5.5285e-04 - val_loss: 0.0064\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 5.4469e-04 - val_loss: 0.0064\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 5.3190e-04 - val_loss: 0.0067\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 5.1984e-04 - val_loss: 0.0064\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 1s 20us/sample - loss: 5.1005e-04 - val_loss: 0.0065\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 5.0133e-04 - val_loss: 0.0069\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 1s 18us/sample - loss: 4.8973e-04 - val_loss: 0.0071\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 4.8107e-04 - val_loss: 0.0066\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 1s 19us/sample - loss: 4.6785e-04 - val_loss: 0.0067\n",
". : 0.67%\n",
"Train on 44896 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 4.7065e-04 - val_loss: 0.0065\n",
"Epoch 2/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 4.5646e-04 - val_loss: 0.0067\n",
"Epoch 3/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 4.4741e-04 - val_loss: 0.0064\n",
"Epoch 4/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 4.3990e-04 - val_loss: 0.0067\n",
"Epoch 5/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 4.3233e-04 - val_loss: 0.0066\n",
"Epoch 6/20\n",
"44896/44896 [==============================] - 1s 17us/sample - loss: 4.2403e-04 - val_loss: 0.0059\n",
"Epoch 7/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 4.1760e-04 - val_loss: 0.0064\n",
"Epoch 8/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 4.1283e-04 - val_loss: 0.0062\n",
"Epoch 9/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 4.0415e-04 - val_loss: 0.0064\n",
"Epoch 10/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 3.9691e-04 - val_loss: 0.0065\n",
"Epoch 11/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 3.9180e-04 - val_loss: 0.0063\n",
"Epoch 12/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 3.8722e-04 - val_loss: 0.0058\n",
"Epoch 13/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 3.8256e-04 - val_loss: 0.0058\n",
"Epoch 14/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 3.7661e-04 - val_loss: 0.0059\n",
"Epoch 15/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 3.7210e-04 - val_loss: 0.0060\n",
"Epoch 16/20\n",
"44896/44896 [==============================] - 1s 18us/sample - loss: 3.6588e-04 - val_loss: 0.0059\n",
"Epoch 17/20\n",
"44896/44896 [==============================] - 1s 20us/sample - loss: 3.6382e-04 - val_loss: 0.0057\n",
"Epoch 18/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 3.6050e-04 - val_loss: 0.0059\n",
"Epoch 19/20\n",
"44896/44896 [==============================] - 1s 21us/sample - loss: 3.5777e-04 - val_loss: 0.0055\n",
"Epoch 20/20\n",
"44896/44896 [==============================] - 1s 19us/sample - loss: 3.5160e-04 - val_loss: 0.0058\n",
". : 0.58%\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "_gQugs8-nbs3"
},
"source": [
"##############################################################Convolutional Autoencoder################################################"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "ZB5Az-M549OM"
},
"source": [
"# Convolutional autoencoder\n",
"\n",
"from keras.engine.topology import Layer\n",
"from keras.models import Sequential\n",
"import keras.backend as K\n",
"from keras.layers import Conv2DTranspose,BatchNormalization,Dropout, Input, Conv1D, MaxPooling1D, UpSampling1D, Dense, Activation, Lambda, Reshape, Flatten\n",
"\n",
"class Conv1DTranspose(Layer):\n",
"    \"\"\"1-D transposed convolution built from Conv2DTranspose.\n",
"\n",
"    Expands inputs to (batch, 1, steps, channels), applies a 2-D\n",
"    transposed convolution with a 1-row kernel, then squeezes the\n",
"    dummy axis back out.\n",
"    \"\"\"\n",
"\n",
"    def __init__(self, filters, kernel_size, strides=1, *args, **kwargs):\n",
"        self._filters = filters\n",
"        self._kernel_size = (1, kernel_size)\n",
"        self._strides = (1, strides)\n",
"        self._args, self._kwargs = args, kwargs\n",
"        super(Conv1DTranspose, self).__init__()\n",
"\n",
"    def build(self, input_shape):\n",
"        print(\"build\", input_shape)\n",
"        # inner model: expand dims -> Conv2DTranspose -> drop dummy axis\n",
"        inner = Sequential()\n",
"        inner.add(Lambda(lambda x: K.expand_dims(x, axis=1), batch_input_shape=input_shape))\n",
"        inner.add(Conv2DTranspose(self._filters, kernel_size=self._kernel_size, strides=self._strides, *self._args, **self._kwargs))\n",
"        inner.add(Lambda(lambda x: x[:, 0]))\n",
"        inner.summary()\n",
"        self._model = inner\n",
"        super(Conv1DTranspose, self).build(input_shape)\n",
"\n",
"    def call(self, x):\n",
"        return self._model(x)\n",
"\n",
"    def compute_output_shape(self, input_shape):\n",
"        return self._model.compute_output_shape(input_shape)\n",
"\n",
"\n",
"from numpy import zeros, newaxis\n",
"\n",
"# add a channel axis: (samples, 41) -> (samples, 41, 1)\n",
"xin = trainX[:, :, newaxis]\n",
"xin_test = testT[:, :, newaxis]\n",
"xin.shape\n",
"\n",
"\n",
"def create_Convolutional_network():\n",
"    \"\"\"Build the conv autoencoder: 41 -> pool to 20 -> upsample to 40 -> transpose-conv back to 41.\"\"\"\n",
"    inp = Input(shape=(xin.shape[1], 1))\n",
"\n",
"    # encoder\n",
"    x = Conv1D(8, 3, activation='tanh', padding='same')(inp)  # 41\n",
"    x = BatchNormalization()(x)\n",
"    x = MaxPooling1D(2)(x)  # 20\n",
"    x = Conv1D(8, 3, activation='tanh', padding='same')(x)\n",
"    x = BatchNormalization()(x)\n",
"\n",
"    # decoder\n",
"    x = UpSampling1D(2)(x)  # 40\n",
"    x = Conv1D(8, 3, activation='tanh', padding='same')(x)  # 40\n",
"    x = BatchNormalization()(x)\n",
"    decoded = Conv1DTranspose(1, 2, activation='tanh', padding='valid')(x)  # back to 41\n",
"\n",
"    autoencoder = Model(inp, decoded)\n",
"\n",
"    autoencoder.compile(optimizer='adam', loss='mean_squared_error')\n",
"    return autoencoder"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "Ocy1qEGP6ndC",
"outputId": "d606f4d4-1395-48db-9974-ccfb6c24ffc0",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 1000
}
},
"source": [
"from sklearn.model_selection import KFold\n",
"\n",
"# 3-fold CV of the convolutional autoencoder on channel-expanded data\n",
"n_split=3\n",
"cvscores = []\n",
"hist=[]\n",
"NtrainX=xin[np.where(y0==0)]  # normal-class rows, shape (n, 41, 1)\n",
"\n",
"for fold_train_idx, fold_val_idx in KFold(n_split).split(NtrainX):\n",
"    x_train, x_test = NtrainX[fold_train_idx], NtrainX[fold_val_idx]\n",
"    model = create_Convolutional_network()  # fresh weights each fold\n",
"    history = model.fit(x_train, x_train, validation_data=[xin_test, xin_test], epochs=20, batch_size=128)\n",
"    hist.append(history)\n",
"    scores = model.evaluate(xin_test, xin_test)  # reconstruction MSE on the test set\n",
"    cvscores.append(scores * 100)\n",
"    print(\". : %.2f%%\" % (scores*100))"
],
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"text": [
"build (None, 40, 8)\n",
"Model: \"sequential\"\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"lambda (Lambda) (None, 1, 40, 8) 0 \n",
"_________________________________________________________________\n",
"conv2d_transpose (Conv2DTran (None, 1, 41, 1) 17 \n",
"_________________________________________________________________\n",
"lambda_1 (Lambda) (None, 41, 1) 0 \n",
"=================================================================\n",
"Total params: 17\n",
"Trainable params: 17\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n",
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 0.0481 - val_loss: 0.0304\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 4s 93us/sample - loss: 0.0072 - val_loss: 0.0210\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 4s 94us/sample - loss: 0.0048 - val_loss: 0.0169\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0036 - val_loss: 0.0137\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 4s 95us/sample - loss: 0.0028 - val_loss: 0.0106\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0022 - val_loss: 0.0090\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0018 - val_loss: 0.0077\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 0.0015 - val_loss: 0.0066\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0014 - val_loss: 0.0055\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 5s 108us/sample - loss: 0.0012 - val_loss: 0.0050\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 5s 113us/sample - loss: 0.0011 - val_loss: 0.0043\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 9.9233e-04 - val_loss: 0.0035\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 9.0046e-04 - val_loss: 0.0032\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 8.1522e-04 - val_loss: 0.0027\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 7.7665e-04 - val_loss: 0.0026\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 7.1646e-04 - val_loss: 0.0025\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 6.6815e-04 - val_loss: 0.0024\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 6.3995e-04 - val_loss: 0.0022\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 6.0393e-04 - val_loss: 0.0023\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 5.7582e-04 - val_loss: 0.0021\n",
". : 0.21%\n",
"build (None, 40, 8)\n",
"Model: \"sequential_1\"\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"lambda_2 (Lambda) (None, 1, 40, 8) 0 \n",
"_________________________________________________________________\n",
"conv2d_transpose_1 (Conv2DTr (None, 1, 41, 1) 17 \n",
"_________________________________________________________________\n",
"lambda_3 (Lambda) (None, 41, 1) 0 \n",
"=================================================================\n",
"Total params: 17\n",
"Trainable params: 17\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n",
"Train on 44895 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44895/44895 [==============================] - 4s 100us/sample - loss: 0.0514 - val_loss: 0.0272\n",
"Epoch 2/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 0.0084 - val_loss: 0.0159\n",
"Epoch 3/20\n",
"44895/44895 [==============================] - 4s 95us/sample - loss: 0.0051 - val_loss: 0.0115\n",
"Epoch 4/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 0.0036 - val_loss: 0.0092\n",
"Epoch 5/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0028 - val_loss: 0.0084\n",
"Epoch 6/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 0.0024 - val_loss: 0.0081\n",
"Epoch 7/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0020 - val_loss: 0.0080\n",
"Epoch 8/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0017 - val_loss: 0.0078\n",
"Epoch 9/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 0.0015 - val_loss: 0.0070\n",
"Epoch 10/20\n",
"44895/44895 [==============================] - 4s 95us/sample - loss: 0.0013 - val_loss: 0.0067\n",
"Epoch 11/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0012 - val_loss: 0.0062\n",
"Epoch 12/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 0.0011 - val_loss: 0.0055\n",
"Epoch 13/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0011 - val_loss: 0.0046\n",
"Epoch 14/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 0.0010 - val_loss: 0.0042\n",
"Epoch 15/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 9.5235e-04 - val_loss: 0.0037\n",
"Epoch 16/20\n",
"44895/44895 [==============================] - 4s 97us/sample - loss: 9.0289e-04 - val_loss: 0.0034\n",
"Epoch 17/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 8.5168e-04 - val_loss: 0.0033\n",
"Epoch 18/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 8.1431e-04 - val_loss: 0.0031\n",
"Epoch 19/20\n",
"44895/44895 [==============================] - 4s 98us/sample - loss: 7.7705e-04 - val_loss: 0.0028\n",
"Epoch 20/20\n",
"44895/44895 [==============================] - 4s 96us/sample - loss: 7.3913e-04 - val_loss: 0.0027\n",
". : 0.27%\n",
"build (None, 40, 8)\n",
"Model: \"sequential_2\"\n",
"_________________________________________________________________\n",
"Layer (type) Output Shape Param # \n",
"=================================================================\n",
"lambda_4 (Lambda) (None, 1, 40, 8) 0 \n",
"_________________________________________________________________\n",
"conv2d_transpose_2 (Conv2DTr (None, 1, 41, 1) 17 \n",
"_________________________________________________________________\n",
"lambda_5 (Lambda) (None, 41, 1) 0 \n",
"=================================================================\n",
"Total params: 17\n",
"Trainable params: 17\n",
"Non-trainable params: 0\n",
"_________________________________________________________________\n",
"Train on 44896 samples, validate on 22543 samples\n",
"Epoch 1/20\n",
"44896/44896 [==============================] - 5s 103us/sample - loss: 0.0543 - val_loss: 0.0310\n",
"Epoch 2/20\n",
"44896/44896 [==============================] - 4s 99us/sample - loss: 0.0092 - val_loss: 0.0174\n",
"Epoch 3/20\n",
"44896/44896 [==============================] - 4s 99us/sample - loss: 0.0051 - val_loss: 0.0115\n",
"Epoch 4/20\n",
"44896/44896 [==============================] - 4s 100us/sample - loss: 0.0034 - val_loss: 0.0083\n",
"Epoch 5/20\n",
"44896/44896 [==============================] - 4s 98us/sample - loss: 0.0025 - val_loss: 0.0069\n",
"Epoch 6/20\n",
"44896/44896 [==============================] - 4s 99us/sample - loss: 0.0020 - val_loss: 0.0062\n",
"Epoch 7/20\n",
"44896/44896 [==============================] - 4s 96us/sample - loss: 0.0017 - val_loss: 0.0050\n",
"Epoch 8/20\n",
"44896/44896 [==============================] - 4s 97us/sample - loss: 0.0014 - val_loss: 0.0048\n",
"Epoch 9/20\n",
"44896/44896 [==============================] - 4s 98us/sample - loss: 0.0013 - val_loss: 0.0040\n",
"Epoch 10/20\n",
"44896/44896 [==============================] - 4s 98us/sample - loss: 0.0011 - val_loss: 0.0037\n",
"Epoch 11/20\n",
"44896/44896 [==============================] - 4s 98us/sample - loss: 0.0010 - val_loss: 0.0035\n",
"Epoch 12/20\n",
"44896/44896 [==============================] - 4s 97us/sample - loss: 9.3159e-04 - val_loss: 0.0032\n",
"Epoch 13/20\n",
"44896/44896 [==============================] - 4s 96us/sample - loss: 8.5419e-04 - val_loss: 0.0032\n",
"Epoch 14/20\n",
"44896/44896 [==============================] - 4s 97us/sample - loss: 7.7956e-04 - val_loss: 0.0029\n",
"Epoch 15/20\n",
"44896/44896 [==============================] - 4s 99us/sample - loss: 7.3116e-04 - val_loss: 0.0027\n",
"Epoch 16/20\n",
"44896/44896 [==============================] - 4s 98us/sample - loss: 6.8515e-04 - val_loss: 0.0024\n",
"Epoch 17/20\n",
"44896/44896 [==============================] - 4s 100us/sample - loss: 6.4819e-04 - val_loss: 0.0021\n",
"Epoch 18/20\n",
"44896/44896 [==============================] - 5s 101us/sample - loss: 6.1847e-04 - val_loss: 0.0020\n",
"Epoch 19/20\n",
"44896/44896 [==============================] - 5s 103us/sample - loss: 5.8845e-04 - val_loss: 0.0021\n",
"Epoch 20/20\n",
"44896/44896 [==============================] - 4s 100us/sample - loss: 5.6044e-04 - val_loss: 0.0020\n",
". : 0.20%\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "pre7YAeGrhQj"
},
"source": [
"# Anomaly threshold: final training loss of the last fold's run\n",
"# (`history` still points at the last History from the CV loop).\n",
"threshold = history.history[\"loss\"][-1]\n",
"\n",
"# hist holds the fold histories in training order; pop() drains it from\n",
"# the end, so H1 is the LAST fold and H3 the FIRST fold. Downstream\n",
"# cells rely on this ordering (the plot cell reads H3).\n",
"H1 = hist.pop()\n",
"H2 = hist.pop()\n",
"H3 = hist.pop()"
],
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"id": "nm2Ycjj-pPHZ",
"outputId": "7eb99e06-4915-48cc-f4b1-398f1c28ca94",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 295
}
},
"source": [
"# Plot training vs. validation loss for the first CV fold (H3).\n",
"history = H3\n",
"loss_train = history.history['loss']\n",
"loss_val = history.history['val_loss']\n",
"# Derive the x-axis from the recorded history instead of hard-coding 20,\n",
"# so the plot stays correct if the epoch count in fit() ever changes.\n",
"epochs = range(len(loss_train))\n",
"plt.plot(epochs, loss_train, 'g', label='Training loss')\n",
"plt.plot(epochs, loss_val, 'b', label='validation loss')\n",
"plt.title('Training and Validation loss')\n",
"plt.xlabel('Epochs')\n",
"plt.ylabel('Loss')\n",
"plt.legend()\n",
"plt.show()"
],
"execution_count": null,
"outputs": [
{
"output_type": "display_data",
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEWCAYAAABxMXBSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3deXxU1dnA8d8zmSwsSUgIyCoQBpFFZImCBhTUKq7UKorFIuJel6q1Sq0LtfW1Wkt5UbTFheJWsVgUK4r1FQW0omyCbBq2GgRkTQIEsp33j3MnTMIkmUnmzmR5vp/P/cyde8+995mbZJ6ce849V4wxKKWUUqHyxDoApZRSDYsmDqWUUmHRxKGUUiosmjiUUkqFRROHUkqpsGjiUEopFRZNHCqmROQ9Ebkm0mVjSUS2iMg5Luz3YxG53pkfKyIfhFK2Fsc5XkQOiEhcbWOtZt9GRHyR3q+KLk0cKmzOl4p/KhORwoD3Y8PZlzHmfGPMzEiXrY9EZKKILAyyPENEikSkb6j7Msa8aow5N0JxVUh0xpj/GmNaGmNKI7F/1fho4lBhc75UWhpjWgL/BS4OWPaqv5yIeGMXZb30CnC6iHSrtHwMsNoY83UMYlIqbJo4VMSIyHARyRWR+0RkBzBDRNJE5F8isktE9jnznQK2Cbz8Ml5EFovIk07ZzSJyfi3LdhORhSJSICIfisg0EXmlirhDifF3IvKps78PRCQjYP3PRGSriOwRkd9UdX6MMbnAR8DPKq0aB7xUUxyVYh4vIosD3v9IRNaLSJ6IPA1IwLruIvKRE99uEXlVRFo5614GjgfecWqM94pIV+eSktcp00FE5orIXhHJEZEbAvY9SUTeEJGXnHOzRkSyqjoHlT5DqrPdLuf8PSAiHmedT0Q+cT7PbhGZ5SwXEfmziPwgIvkisjqcmpqKDE0cKtLaAelAF+BG7O/YDOf98UAh8HQ12w8GNgAZwBPACyIitSj7GvAF0BqYxLFf1oFCifGnwLVAWyABuAdARHoDzzr77+AcL+iXvWNmYCwi0hPo78Qb7rny7yMD+CfwAPZcbASyA4sAjznx9QI6Y88JxpifUbHW+ESQQ7wO5DrbXw78j4icFbD+EqdMK2BuKDE7ngJSgUzgTGwCvdZZ9zvgAyANez6fcpafC5wBnOBsewWwJ8TjqUgxxuikU60nYAtwjjM/HCgCkqop3x/YF/D+Y+B6Z348kBOwrjlggHbhlMV+6ZYAzQPWvwK8EuJnChbjAwHvfw6878w/BLwesK6Fcw7OqWLfzYF84HTn/aPA27U8V4ud+XHA5wHlBPtFf30V+/0xsCLYz9B539U5l15skikFkgPWPwb8zZmfBHwYsK43UFjNuTWAD4hzzlPvgHU3AR878y8B04FOlbY/C/gGGAJ4Yv3731QnrXGoSNtljDnsfyMizUXkr86liHxgIdBKqu6xs8M/Y4w55My2DLNsB2BvwDKA76oKOMQYdwTMHwqIqUPgvo0xB6nmP2Anpn8A45za0Vjsl2RtzpVf5RhM4HsROU5EXheRbc5+X8HWTELhP5cFAcu2Ah0D3lc+N0lSc/tWBhDv7CvYfu/FJsAvnMtfE5zP9hG2RjMN+EFEpotISoifRUWIJg4VaZWHW/4l0BMYbIxJwV5mgIBr8C7YDqSLSPOAZZ2rKV+XGLcH7ts5ZusatpmJvcTyIyAZeKeOcVSOQaj4ef8H+3M5ydnv1ZX2Wd0Q2d9jz2VywLLjgW01xFST3UAx9rLcMfs1xuwwxtxgjOmArYk8I043XmPMVGPMIGzt5gTgV3WMRYVJE4dyWzL2Wv1+EUkHHnb7gMaYrcBSYJKIJIjIacDFLsU4G7hIRIaKSALwCDX/XS0C9mMvxbxujCmqYxzvAn1E5CfOf/p3YC/Z+SUDB4A8EenIsV+0O7HtDMcwxnwHfAY8JiJJItIPuA5ba6k1Y7v6vgE8KiLJIt
IFuNu/XxEZHdAxYB82uZWJyCkiMlhE4oGDwGGgrC6xqPBp4lBumwI0w/6H+TnwfpSOOxY4DXvZ6PfALOBIFWVrHaMxZg1wK7Zxezv2Sy63hm0M9vJUF+e1TnEYY3YDo4E/YD9vD+DTgCK/BQYCedgk889Ku3gMeEBE9ovIPUEOcRW23eN7YA7wsDHmw1Biq8Ht2C//TcBi7Dl80Vl3CrBERA5gG9x/YYzZBKQAz2HP81bs5/1jBGJRYRCnwUmpRs3pzrneGON6jUepxk5rHKpRci5pdBcRj4iMBEYBb8U6LqUaA72zVzVW7bCXZFpjLx3dYoxZEduQlGoc9FKVUkqpsOilKqWUUmFpEpeqMjIyTNeuXWMdhlJKNSjLli3bbYxpU3m5q4nDaZT8X+zwAs8bY/5QaX0itjviIGy3uiuNMVtE5EfYroUJ2GEJfuXcMYqIfAy0x/Z3BzjXGPNDdXF07dqVpUuXRuxzKaVUUyAiW4Mtdy1xOMMkTMPeHZsLfCkic40xawOKXYcdi8cnImOAx4Ersf3YLzbGfO+MfDmfikMcjDXGaCZQSqkYcLON41TsIHSbnDtjX8d2iQw0Cjv8Atg7cM8WETHGrDDGfO8sXwM0c2onSimlYszNxNGRigPL5VKx1lChjDGmBHtna+Vxfi4DlhtjAu/6nSEiK0XkwWqG3FZKKeWCet04LiJ9sJevAh+ROdYYs80ZdO1N7LMNXgqy7Y3Y50Fw/PHHRyFapVSg4uJicnNzOXz4cM2FVUwlJSXRqVMn4uPjQyrvZuLYRsUROjtx7Iia/jK5zuBsqThDUjsDnM0BxhljNvo3MMb4R88sEJHXsJfEjkkcxpjp2EHkyMrK0ptVlIqy3NxckpOT6dq1K3phoP4yxrBnzx5yc3Pp1q3yU42Dc/NS1ZdAD7GP8EzAPld5bqUyc4FrnPnLgY+MMUbsYy3fBSYaY8oHaxMRr/O0M5zRMS8C9DnNStVDhw8fpnXr1po06jkRoXXr1mHVDF1LHE6bxW3YHlHrgDeMMWtE5BERucQp9gLQWkRysEMqT3SW34Z9SthDTlvGShFpCyQC80VkFbASW2N5zq3PoJSqG00aDUO4PydX2ziMMfOAeZWWPRQwfxg7HHTl7X6PHQo7mEGRjLE6zzwDrVvDlVdG64hKKVX/6ZAj1ZgxA559NtZRKKVqY8+ePfTv35/+/fvTrl07OnbsWP6+qKio2m2XLl3KHXfcUeMxTj/99IjE+vHHH3PRRRdFZF/RUK97VcVadjZMnw5FRZCQEOtolFLhaN26NStXrgRg0qRJtGzZknvuOfqcqpKSErze4F+BWVlZZGVl1XiMzz77LDLBNjBa46jG0KFQWAgrdDBupRqF8ePHc/PNNzN48GDuvfdevvjiC0477TQGDBjA6aefzoYNG4CKNYBJkyYxYcIEhg8fTmZmJlOnTi3fX8uWLcvLDx8+nMsvv5wTTzyRsWPH4h95fN68eZx44okMGjSIO+64o8aaxd69e/nxj39Mv379GDJkCKtWrQLgk08+Ka8xDRgwgIKCArZv384ZZ5xB//796du3L4sWLYr4OQtGaxzVyM62r59+CoMHxzYWpRqyO9+/k5U7VkZ0n/3b9WfKyClhb5ebm8tnn31GXFwc+fn5LFq0CK/Xy4cffsj999/Pm2++ecw269evZ8GCBRQUFNCzZ09uueWWY+55WLFiBWvWrKFDhw5kZ2fz6aefkpWVxU033cTChQvp1q0bV111VY3xPfzwwwwYMIC33nqLjz76iHHjxrFy5UqefPJJpk2bRnZ2NgcOHCApKYnp06dz3nnn8Zvf/IbS0lIOHToU9vmoDa1xVKN9e8jMhMWLYx2JUipSRo8eTVxcHAB5eXmMHj2avn37ctddd7FmzZqg21x44YUkJiaSkZFB27Zt2blz5zFlTj31VDp16oTH46F///5s2bKF9evXk5mZWX5/RCiJY/HixfzsZz8D4KyzzmLPnj3k5+eTnZ
3N3XffzdSpU9m/fz9er5dTTjmFGTNmMGnSJFavXk1ycnJtT0tYtMZRg+xsmD8fjAHtWahU7dSmZuCWFi1alM8/+OCDjBgxgjlz5rBlyxaGDx8edJvExKND5cXFxVFSUlKrMnUxceJELrzwQubNm0d2djbz58/njDPOYOHChbz77ruMHz+eu+++m3HjxkX0uMFojaMGQ4fCDz/Axo01l1VKNSx5eXl07GiH0Pvb3/4W8f337NmTTZs2sWXLFgBmzZpV4zbDhg3j1VdfBWzbSUZGBikpKWzcuJGTTjqJ++67j1NOOYX169ezdetWjjvuOG644Qauv/56li9fHvHPEIwmjhr42zn0cpVSjc+9997Lr3/9awYMGBDxGgJAs2bNeOaZZxg5ciSDBg0iOTmZ1NTUareZNGkSy5Yto1+/fkycOJGZM+0A4lOmTKFv377069eP+Ph4zj//fD7++GNOPvlkBgwYwKxZs/jFL34R8c8QTJN45nhWVpap7YOcysogIwMuuwye03vUlQrZunXr6NWrV6zDiLkDBw7QsmVLjDHceuut9OjRg7vuuivWYR0j2M9LRJYZY47pl6w1jhp4PHD66VrjUErVznPPPUf//v3p06cPeXl53HTTTbEOqc60cTwEQ4fCu+/C7t229qGUUqG666676mUNoy60xhECfztHE71JVCmlKtDEEYJTTrFDjnz6ac1llVKqsdPEEYKkJBg0SNs5lFIKNHGEbOhQWLoU9CmYSqmmThNHiLKz7Si5tezVq5RqAPyDFn7//fdcfvnlQcsMHz6cmrr3T5kypcK4URdccAH79++vc3yTJk3iySefrPN+6koTR4j8w+5rO4dSjV+HDh2YPXt2rbevnDjmzZtHq1atIhFavaCJI0Rt2kDPntrOoVRDMXHiRKZNm1b+3v/f+oEDBzj77LMZOHAgJ510Em+//fYx227ZsoW+ffsCUFhYyJgxY+jVqxeXXnophYWF5eVuueUWsrKy6NOnDw8//DAAU6dO5fvvv2fEiBGMGDECgK5du7J7924AJk+eTN++fenbty9TpkwpP16vXr244YYb6NOnD+eee26F4wSzcuVKhgwZQr9+/bj00kvZt29f+fF79+5Nv379GDNmDBB8SPa60Ps4wjB0KMyZY+8m92jKVSpkd94JKyM7qjr9+8OUasZOvPLKK7nzzju59dZbAXjjjTeYP38+SUlJzJkzh5SUFHbv3s2QIUO45JJLqnzu9rPPPkvz5s1Zt24dq1atYuDAgeXrHn30UdLT0yktLeXss89m1apV3HHHHUyePJkFCxaQUenGr2XLljFjxgyWLFmCMYbBgwdz5plnkpaWxrfffsvf//53nnvuOa644grefPNNrr766io/37hx43jqqac488wzeeihh/jtb3/LlClT+MMf/sDmzZtJTEwsvzwWbEj2utCvvzBkZ8PevbB+fawjUUrVZMCAAfzwww98//33fPXVV6SlpdG5c2eMMdx///3069ePc845h23btgUdJt1v4cKF5V/g/fr1o1+/fuXr3njjDQYOHMiAAQNYs2YNa9eurTamxYsXc+mll9KiRQtatmzJT37yk/KHL3Xr1o3+/fsDMGjQoPKBEYPJy8tj//79nHnmmQBcc801LFy4sDzGsWPH8sorr5Q/4TDYkOx1oTWOMAwdal8//RR6945tLEo1JNXVDNw0evRoZs+ezY4dO7jyyisBePXVV9m1axfLli0jPj6erl27crgW3SU3b97Mk08+yZdffklaWhrjx4+v1X78Kg/LXtOlqqq8++67LFy4kHfeeYdHH32U1atXBx2S/cQTT6x1rFrjCIPPZ9s6tJ1DqYbhyiuv5PXXX2f27NmMHj0asP+tt23blvj4eBYsWMDWrVur3ccZZ5zBa6+9BsDXX39d/ijX/Px8WrRoQWpqKjt37uS9994r3yY5OTloO8KwYcN46623OHToEAcPHmTOnDkMGzYs7M+VmppKWlpaeW3l5Z
df5swzz6SsrIzvvvuOESNG8Pjjj5OXl8eBAweCDsleF1rjCIOIrXVozyqlGoY+ffpQUFBAx44dad++PQBjx47l4osv5qSTTiIrK6vG/7xvueUWrr32Wnr16kWvXr0YNGgQQPlw5ieeeCKdO3cm2z82EXDjjTcycuRIOnTowIIFC8qXDxw4kPHjx3PqqacCcP311zNgwIBqL0tVZebMmdx8880cOnSIzMxMZsyYQWlpKVdffTV5eXkYY7jjjjto1aoVDz74IAsWLMDj8dCnTx/OP//8sI8XSIdVD9Of/gT33APbt0O7dhHZpVKNkg6r3rDosOouCmznUEqppkgTR5gGDLBjV2k7h1KqqdLEEaaEBBg8WGscSoWiKVwKbwzC/Tlp4qiF7GxYvhwOHox1JErVX0lJSezZs0eTRz1njGHPnj1h3RSovapqITsbSkvhiy/AGVFAKVVJp06dyM3NZdeuXbEORdUgKSmJTp06hVxeE0ctnHaa7Zq7eLEmDqWqEh8fT7du3WIdhnKBXqqqhbQ06NNH2zmUUk2TJo5aGjrUPoO8tDTWkSilVHRp4qil7GwoKICvv451JEopFV2uJg4RGSkiG0QkR0QmBlmfKCKznPVLRKSrs/xHIrJMRFY7r2cFbDPIWZ4jIlOlqrGQXea/EVDv51BKNTWuJQ4RiQOmAecDvYGrRKTymLLXAfuMMT7gz8DjzvLdwMXGmJOAa4CXA7Z5FrgB6OFMI936DNXp0gU6dNB2DqVU0+NmjeNUIMcYs8kYUwS8DoyqVGYUMNOZnw2cLSJijFlhjPneWb4GaObUTtoDKcaYz43tHP4S8GMXP0OV/AMeao1DKdXUuJk4OgLfBbzPdZYFLWOMKQHygNaVylwGLDfGHHHK59awTwBE5EYRWSoiS93qR56dDd99B//9ryu7V0qpeqleN46LSB/s5aubwt3WGDPdGJNljMlq06ZN5INDBzxUSjVNbiaObUDngPednGVBy4iIF0gF9jjvOwFzgHHGmI0B5QNvbwy2z6jp1w9atNDEoZRqWtxMHF8CPUSkm4gkAGOAuZXKzMU2fgNcDnxkjDEi0gp4F5hojCn/WjbGbAfyRWSI05tqHPC2i5+hWl6vvYtc2zmUUk2Ja4nDabO4DZgPrAPeMMasEZFHROQSp9gLQGsRyQHuBvxddm8DfMBDIrLSmdo6634OPA/kABuBo89rjIHsbFi9GvLyYhmFUkpFjz4BsI4+/BB+9CN4/3047zxXDqGUUjGhTwB0yeDB4PFoO4dSqunQxFFHycnQv7+2cyilmg5NHBGQnQ1LlkBxcawjUUop92niiIChQ+HQIVi5MtaRKKWU+zRxREB2tn3Vdg6lVFOgiSMCOnaErl21nUMp1TRo4qjGY4se48UVL4ZUNjvb1jiaQO9mpVQTp4mjGm+ue5M31rwRUtmhQ2HHDti0yeWglFIqxjRxVKN7endy9uaEVFbbOZRSTYUmjmr40nxs2b+F4tKa+9n26QOpqdrOoZRq/DRxVMOX7qPUlLI1b2uNZT0eOP10rXEopRo/TRzV8KX7ANi4d2MNJa2hQ2HtWti7182olFIqtjRxVMOfOMJt5/jsM7ciUkqp2NPEUY12LdvRPL55yInjlFMgPl7bOZRSjZsmjmqICN3TupOzL7TE0bw5DByo7RxKqcZNE0cNfOm+kNs4wLZzfPklHDniYlBKKRVDmjhq4Ev3sXHfRkrLSkMqn51tk8ayZS4HppRSMaKJowa+dB9FpUVsK9gWUnl/A7m2cyilGitNHDXontYdCL1nVdu20KOHtnMopRovTRw1CLdLLth2Dh3wUCnVWGniqEGnlE4kxCWE1UCenQ179sCGDS4GppRSMaKJowZxnjgy0zJD7pILtsYB2s6hlGqcNHGEoHta6KPkApxwAmRkaDuHUqpx0sQRAv+9HCbERgsRe7lKaxxKqcZIE0cIfOk+DhYfZOfBnSFvk50NOTmwM/RNlFKqQdDEEYLa9q
wCHfBQKdX4aOIIQW0Sx8CBkJgIixa5FZVSSsWGJo4QdEntQpzEhZU4EhPhRz+CGTPss8iVUqqx0MQRgvi4eLq06hJW4gB48kk4dAjuvtulwJRSKgY0cYTIP9hhOHr2hF//Gv7+d/jgA5cCU0qpKNPEESJfmo9v93wbcpdcv4kT7X0dt9wChYUuBaeUUlGkiSNEvnQfeUfy2FsY3gPFk5LgL3+BTZvg9793KTillIoiTRwh6p4e3ii5gUaMgHHj4IknYM2aSEemlFLR5WriEJGRIrJBRHJEZGKQ9YkiMstZv0REujrLW4vIAhE5ICJPV9rmY2efK52prZufwc/fJTfcdg6/J5+ElBS4+WYoK4tkZEopFV2uJQ4RiQOmAecDvYGrRKR3pWLXAfuMMT7gz8DjzvLDwIPAPVXsfqwxpr8z/RD56I+VmZaJILWqcQC0aWOTx+LF8OKLEQ5OKaWiyM0ax6lAjjFmkzGmCHgdGFWpzChgpjM/GzhbRMQYc9AYsxibQOqFJG8SnVI61TpxAIwfD2ecAffeCz9EJd0ppVTkuZk4OgLfBbzPdZYFLWOMKQHygNYh7HuGc5nqQRGRYAVE5EYRWSoiS3ft2hV+9EF0Tw9vlNxjY4K//hUOHIBf/jIiISmlVNQ1xMbxscaYk4BhzvSzYIWMMdONMVnGmKw2bdpE5MC+NF+dEgfAiSfaLrqvvAIffhiRsJRSKqrcTBzbgM4B7zs5y4KWEREvkArsqW6nxphtzmsB8Br2klhU+NJ97Dq0i/wj+XXaz/33g89n7+04XG8uximlVGjcTBxfAj1EpJuIJABjgLmVyswFrnHmLwc+MtXcYSciXhHJcObjgYuAryMeeRXKe1aF8RjZYJKS4Nln7bDr//M/kYhMKaWix7XE4bRZ3AbMB9YBbxhj1ojIIyJyiVPsBaC1iOQAdwPlXXZFZAswGRgvIrlOj6xEYL6IrAJWYmssz7n1GSqrzSi5VTnnHLj6avjDH2DdujrvTimlosbr5s6NMfOAeZWWPRQwfxgYXcW2XavY7aBIxReuzLRMIDKJA+BPf4J337X3dnz8sW08V0qp+q4hNo7HTHJiMse1OK7WNwFW1ratvZt84UL4298iskullHKdJo4w+dLr3rMq0IQJ9mmB99wDEeo1rJRSrtLEEaZIJw6Px97bUVAAv/pVxHarlFKu0cQRJl+6j20F2zhUfChi++zd2yaNmTNhwYKI7VYppVyhiSNM3dPsKLmb9m2K6H4feAAyM21D+ZEjEd21UkpFlCaOMEXqXo7KmjWz93Z8843toquUUvWVJo4wRfJejsrOPReuusreFLhhQ8R3r5RSERFS4hCRFiLiceZPEJFLnDu3m5y0ZmmkN0t3JXEATJ4MzZvbS1ZhPqVWKaWiItQax0IgSUQ6Ah9gBxb8m1tB1Xfd07qTs8+dxNGuHTz+uL0h8OWXXTmEUkrVSaiJQ4wxh4CfAM8YY0YDfdwLq37zpfsi3sYR6Prr4fTT4e67Yfdu1w6jlFK1EnLiEJHTgLHAu86yOHdCqv986T625m2lqLTIlf17PPCXv0BeHtx+u16yUkrVL6EmjjuBXwNznIEKM4Eme8eBL91HmSljy/4trh3jpJNg0iR4/XX44x9dO4xSSoUtpEEOjTGfAJ8AOI3ku40xd7gZWH0W2LPqhNYnuHac+++H1avtg5969YKLL3btUEopFbJQe1W9JiIpItIC+/yLtSLSZAfI8N8E6GY7B9jRcl98EQYOhJ/+FL6O2pNHlFKqaqFequptjMkHfgy8B3Sjike2NgVtW7SlZUJL17rkBmreHN5+G5KTbY1DB0JUSsVaqIkj3rlv48fAXGNMMdBkm2xFxA526FKX3Mo6doS33oLt2+Hyy6HInTZ5pZQKSaiJ46/AFqAFsFBEugB1e/B2AxfpUXJrcuqp9rLVwoVw663a00opFTshJQ5jzFRjTEdjzAXG2gqMcDm2es2X5mPzvs2UlpVG7Zg//a
ltMH/+eXjqqagdVimlKgi1cTxVRCaLyFJn+hO29tFkdU/vTnFZMd/lfxfV4/7udzBqFNx1F3zwQVQPrZRSQOiXql4ECoArnCkfmOFWUA2Bm4MdVsfjgVdegb594YordDBEpVT0hZo4uhtjHjbGbHKm3wKZbgZW38UqcQC0bAlz50JCgu1ptW9f1ENQSjVhoSaOQhEZ6n8jItlAoTshNQwdkjuQ5E2KSeIA6NIF/vlP2LLF1jxKSmIShlKqCQo1cdwMTBORLSKyBXgauMm1qBoAj3jITMtk4z53bwKsztChdkyrDz+0AyIqpVQ0hDrkyFfAySKS4rzPF5E7gVVuBlffRbtLbjATJsCaNfY5Hn36wE1NOp0rpaIhrCcAGmPynTvIAZr8/7i+NDu8epkpi2kcTzwBI0fCbbfZ53gopZSb6vLoWIlYFA2UL91HYUkh2wu2xzSOuDg7iq7PZ+8s37QppuEopRq5uiSOJn/vcix7VlWWmgrvvANlZXDJJZDfpO/rV0q5qdrEISIFIpIfZCoAOkQpxnqre7ozSm4MG8gD+XwwezasXw9jx0Jp9G5qV0o1IdUmDmNMsjEmJciUbIwJqWG9MTs+9Xi8Hm+9qHH4nXWWHY7kX/+yw5MopVSkNfkv/7rwerx0a9WtXiUOgFtusc/ueOIJ6NzZDoooTb5FSikVKXVp41DUjy65wUyZYnta3X67fd1YP66mKaUaAU0cdeRL97Fx30ZMPRvnPD7eXq6aNg0+/9yObfXYY1BcHOvIlFINnSaOOuqe1p38I/nsPrQ71qEcIy4Ofv5zWLcOLrzQtnkMHAj/+U+sI1NKNWSuJg4RGSkiG0QkR0QmBlmfKCKznPVLRKSrs7y1iCwQkQMi8nSlbQaJyGpnm6kisb16X5+65FalQwfb22ruXMjLg+xsm1D27491ZEqphsi1xCEiccA04HygN3CViPSuVOw6YJ8xxgf8GXjcWX4YeBC4J8iunwVuAHo408jIRx+6hpA4/C6+GNauhTvvhL/+FXr1gn/8Q58mqJQKj5s1jlOBHGcY9iLgdWBUpTKjgJnO/GzgbBERY8xBY8xibAIpJyLtgRRjzOfGNiq8hH0Oesx0bdUVj3gaROIAOyT75MnwxQPOjWMAABn6SURBVBe2JnLFFTahbN0a68iUUg2Fm4mjIxD4eLxcZ1nQMsaYEiAPaF3DPnNr2CcAInKj/4mFu3btCjP00CV6E+mc0rne3AQYqkGDYMkSm0Q+/hh697bzOjy7UqomjbZx3Bgz3RiTZYzJatOmjavHqq9dcmvi9dpH0K5da28c/OUv4dRTYenSWEemlKrP3Ewc24DOAe87OcuClhERL5AK7Klhn51q2GfUNdTE4Xf88bbhfPZs2LEDBg+27SAFBbGOTClVH7mZOL4EeohINxFJAMYAcyuVmQtc48xfDnxkqrkhwhizHcgXkSFOb6pxwNuRDz08vnQfewr3sP9ww+2mJAKXXWa77t58M0ydai9fzZ6tjedKqYpcSxxOm8VtwHxgHfCGMWaNiDwiIpc4xV4AWotIDvb5HuVddp0nDU4GxotIbkCPrJ8DzwM5wEbgPbc+Q6j8Pas27m1Y7RzBpKbamwY/+wzS0mD0aDjtNPjkk1hHppSqL1wdq8oYMw+YV2nZQwHzh4HRVWzbtYrlS4G+kYuy7rqn2VFyc/bmMKjDoBhHExlDhsDy5TBzJjz8MAwfDhdcYO8+79cv1tEppWKp0TaOR1NmWibQMO7lCIfXC9ddB99+C48/bmsh/fvDNddo912lmjJNHBHQIqEFHZI7kLOvcSUOv2bN4N577ZMFf/UrmDULTjgB7r4bdte/kVaUUi7TxBEhDb1nVSjS0mzN49tv4eqr4X//F7p3h0cfhYMHYx2dUipaNHFEiC/N1ygax0PRuTO88AKsXg0jRsADD9inD/7lLzr6rlJNgSaOCOme3p3tB7ZzsKjp/Ovduze89RYsXmxrHrfcAn366PhXSjV2mj
gipLxLbgMbeiQSsrNh0SJ7E2F8vB3/avBgWLAg1pEppdygiSNCGtIouW4QsYMlrloFM2bA9u12GJPTT7ePsF2/XmshSjUWmjgixH8vR1Np56hKXByMHw/ffAN/+hMcPgz33WeHcO/ZE+65BxYu1MEUlWrINHFESGpSKhnNM5psjaOyZs1sd93ly+09H9OmQWamHcrkzDPhuOPgZz+z7SH5+bGOVikVDk0cEeRL9zXaeznq4vjj7RMH338f9uyx419ddBG8955tD8nIgPPOg6ef1hsLlWoIXB1ypKnxpftYuHVhrMOo15KT7WCKl11mL1f95z/wzju2Yf322+108slwySV2OvlkKC21Zf2v/qmm9yUl9sFVmZl2DC6lVGRo4oggX5qPV1e9ypGSIyR6E2MdTr3n9cKwYXZ64gnYsOFoEnn0Ufjd7yJ3rNatbQIJNnXubNtmlFKh0cQRQb50HwbD5v2bOTHjxFiH0+D07Hm0AX3PHpg3D7ZssV18vV775e71Hp1CeZ+XZ4dK8U/LlsGbb1ZsnPd6oWvX4EnF57O1pIaosND2dktKinUkqrHRxBFB3dOPjpKriaNuWre2jeduKCmBbdtg48aKSWXTJttYv6fSo8Q6dbI3O/bubXuH+V9bV/eQ4wgrLrZx7d597LRrV/Dlhw7ZpHjyyXa04yFD7BD5mZk2oShVW5o4Iqip38vRUHi90KWLnc4669j1/lrKxo12XK61a+0DrqZPt1/Gfm3bHk0kgUmlXbuav5gPHbJf+KFMu3fbmKqSkmI7GGRkQPv2cNJJR9/n59tny8+caXu2gV3uTyJDhsAppzTcWpWKDU0cEdS6WWtSE1M1cTRwqakwYICdApWVwX//a5OIP5msXQuvvVbxiz019WgiSU8PngyqGhQyPh7atDk6de1qX/2JIHA+I8PWehISav5MpaWwZg18/rntkPD55/Cvf9l1Hg/07VuxVnLCCXa5UsFINU9qbTSysrLM0qVLo3Os6Vm0adGG98bG/MGEKkqMsc9qD0wm/ik/337Zt21bMSEEm9q2tbWHaF1G2rsXvvjiaDJZsuRoAmzVyg4bM2iQbefp3t1O7du7m1D27bM1Pf/k9dpLlu3bu3dMVTURWWaMyTpmuSaOyBozewzLti/j29u/jcrxlIqUsjI7NMznnx9NJmvX2uV+zZrZNhJ/Iune/Whi6dLF1piq40+yOTkVE4T//d69x27j9dru27fdZsdF0/aZ6KkqceilqgjrntadN9e9SXFpMfFxNfwVKVWPeDxH22smTLDLiovtTZmVv+A3boR//9v23PKLi7M3ewYmlBYtKm67aVPFdiKPxyYcn8/eDBqYiDIz7Zhnzz4LL75oHyDWrx/ceiuMHWv3rWJDaxwRNmPFDCbMnUDO7TnlvayUaoyMsV/swZJKYO0hKeloLSXwspfPF1otBWyyee01O7rAV1/ZdqRrr7UjEvTo4e7nbMq0xhElgT2rNHGoxkwEOnSw07Bhx67fv992AohEu0jz5nD99XDddfDZZzaBPP00TJlih6u57TY4//zI3chZVGRrR7t22faetDQ7NW+ul8pAE0fENeXncigVqFUrO0WSiG3nyM6GyZPhuefgr3+1Q/p362YfJjZhQmj32PhrTN98Y0ct2LDh6PzmzbYnWmXx8RUTSU1Thw62RtTYko1eqoowYwwtH2vJTYNuYvJ5k6NyTKWasuJi+yTKadPgk0/spbGrrrJtIYMGwYEDRxNC4Os330BBwdH9NGtmuyH37Hn09bjjbE+zfftqnvbvr9iRwK9NGxg6FM44w04nn9xwhrjRS1VRIiJ0T+uu93IoFSXx8TB6tJ1Wr4ZnnoGXXrIPFMvIsDdQ+onYe2NOOMHWWvwJomdP6NixbpfUyspskgpMJps22adjLlwIc+bYcsnJ9tjDhtlEcsopkNjAhrbTGocLfjLrJ6zfvZ61t66N2jGVUkfl5dm75b/6yjbC+2sRPl/sxu7KzbVJxJ
9I1qyxyxMT7T0z/kRy2mmh3clfWAg7d9ruzVVNP/xgRz+obQ1H7+OIYuK499/3MnXJVA795hAe0dtvlVLH2rMHFi8+mkyWLbPtKnFxdtSCYcNs1+jdu4MnhWDD0IjYWla7dkenZ5+tfddlvVQVRb50H0dKj7AtfxudUzvHOhylVD3UujWMGmUnsJe5Pv/c1kYWLbJf+IcP23UpKTYJHHecvZfl3HMrJgf/1KZNaN2b60oThwv8zx/P2ZujiUMpFZKWLeGcc+wEcOSI7fXVtq3tBlyf6HUUF+gouUqpukpMtA359S1pgCYOV3RK6URCXIImDqVUo6SJwwVxnjgy0zL1JkClVKOkicMlvnSf1jiUUo2SJg6X+G8CbArdnZVSTYuriUNERorIBhHJEZGJQdYnisgsZ/0SEekasO7XzvINInJewPItIrJaRFaKSPRuzgiTL93HweKD7Dy4M9ahKKVURLmWOEQkDpgGnA/0Bq4Skd6Vil0H7DPG+IA/A4872/YGxgB9gJHAM87+/EYYY/oHuzGlvigf7HCvtnMopRoXN2scpwI5xphNxpgi4HVgVKUyo4CZzvxs4GwREWf568aYI8aYzUCOs78GQ7vkKqUaKzcTR0fgu4D3uc6yoGWMMSVAHtC6hm0N8IGILBORG6s6uIjcKCJLRWTprl276vRBaqNLahfiJE4Th1Kq0WmIjeNDjTEDsZfAbhWRM4IVMsZMN8ZkGWOy2rRpE90Igfi4eLq06sKCLQsoLi2O+vGVUsotbiaObUDgeBudnGVBy4iIF0gF9lS3rTHG//oDMId6fAnrvuz7+PS7T/npP39KSVlJrMNRSqmIcDNxfAn0EJFuIpKAbeyeW6nMXOAaZ/5y4CNj+6/OBcY4va66AT2AL0SkhYgkA4hIC+Bc4GsXP0Od3DjoRiafO5nZa2czbs44SsuCPFJMKaUaGNcGOTTGlIjIbcB8IA540RizRkQeAZYaY+YCLwAvi0gOsBebXHDKvQGsBUqAW40xpSJyHDDHtp/jBV4zxrzv1meIhLtOu4vismLu+/A+vB4vM0bNIM7TQB7/pZRSQejzOKLk9wt/z4MLHmRC/wk8d8lz+pwOpVS9p8/jiLEHzniA4tJiHln4CPFx8Tx74bNIY3uCvVKqSdDEEUWThk+iuKyYxxY/htfj5anzn9LkoZRqcDRxRJGI8OhZj1JcWsyT/3mSeE88k8+brMlDKdWgaOKIMhHhiR89QXFZMVOWTCE+Lp7Hz3lck4dSqsHQxBEDIsKfz/szxaXF/PGzP5IQl8DvRvxOk4dSqkHQxBEjIsJTFzxFcVkxjy56lHhPPA8PfzjWYSmlVI00ccSQRzz85aK/UFxWzKRPJhEfF8/9w+6PdVhKKVUtTRwx5hEPz1/8PCVlJfzmo9+QEJfAPaffE+uwlFKqSpo46oE4TxwzRs2gpKyEX/37V3g9Xu4ccmesw1JKqaA0cdQTXo+Xly99meLSYu6afxfxnnhuPfXWWIellFLH0HEv6hGvx8vfL/s7o3qO4rb3bmP6sumxDkkppY6hiaOeiY+LZ9bls7igxwXc9K+beOSTR9h1MPoPolJKqapo4qiHEr2JvHnFm1x64qU8/PHDdJzckSv+cQUfbPyAMlMW6/CUUk2cjo5bz635YQ0vrHiBl756iT2Fe+iS2oUJAyZwbf9r6ZzaueYdKKVULVU1Oq4mjgbiSMkR3t7wNs8vf55/b/o3gjDSN5LrB17PRSdcREJcQqxDVEo1Mpo4GnjiCLR532ZmrJzBiyteZFvBNtq2aMs1J1/DdQOuo2dGz1iHp5RqJDRxNKLE4VdaVsr8jfN5fvnzvPPNO5SUlTDs+GFcN+A6RvcZTfP45rEOUSnVgGniaISJI9COAzt46auXeH7583y791tSElP4ad+fclnvyxjYfiDpzdJjHaJSqoHRxNHIE4efMYZF/13E88uf5x9r/8HhksMAHJ96PAPaDbBTe/vaKaWTjsirlKqSJo
4mkjgC5R3O44ttX7Bixwo7bV/BN3u+wWB/5q2btS5PIv6E0iO9B3GeuBhHrpSqDzRxNMHEEcyBogOs2rmKFdtXlCeUr3/4mqLSIgBaxLeg33H9yhNJr4xedEzpSIfkDtpzS6kmRhOHJo4qFZUWsW7XuvJayYodK1i5YyUFRQUVyrVt0ZaOyR3pmNKRjskd6ZTSqcL7jikdSU1M1ctfSjUSmjg0cYSlzJSxad8mcvbmsC1/G9sKtrEtfxu5Bbnl73cf2n3Mdi3iW1RIJO1atCOtWRrpzdJJS7Kv6c3Sy5elJKbgER3AQKn6qKrEoaPjqqA84sGX7sOX7quyzJGSI3xf8D25+bnliWVbgZ1y83NZtHURuw7t4lDxoWqP0yqpVXlS8SeU9CQ73yqpFamJqfY1KZXUxNQKr828zbSGo1SUaeJQtZboTaRbWje6pXWrttzhksPsK9zHvsP72Fu4l32F9nVv4d6jywLWbd63uXxZTWNzeT3e8iTiTzLliaVSkqnqVZOPUuHRxKFcl+RNon1ye9ontw9ruzJTxoGiA+QdziPvSF7Q1/2H99v5gOUb924sf59/JL+8F1lVApNP5deUhBSSE5NpmdCS5ITkY+aTE5z3znxCXIImIdXoaeJQ9ZZHPKQkppCSmEJnajegYyjJp8KrM795/+byxFNQVEBJWUlIx/N6vBUSTMuEljTzNqNZfDOaxze3815nPr7ZsesqzTfzNiMhLoFEb6J9jUusMK+JSsWCJg7VqEUi+RhjKCotoqCogIIjBRwoOlA+X1DkvHfmK6wvKuBg0UEKSwrZfWg3hcWFFJYUUlhcyKHiQxSWFJZ3g66LeE98tYklPi7evnrij5mP91S9zj8fuI/AZZX3XdUyr8dbfiz/vNfj1U4RDZgmDqVqICIkeu0XckbzjIjuu7SstDyZFJY4CSUgwfiTy5GSI/a19EiFef+6oPPOa3FpMUWlRRSWFJJ/JJ/isuLy5dXNu80jnmOSiT9pVZ4PnPzbVFgWWE4qrovzxBEncRXmA1+9Hm+Ny/zbBi4PNl/d8TziOeY4Va3ziKde1yQ1cSgVQ3GeuPJLWvWJMYaSshKKy4rLE09RaVGF5FJ5WeDywGUlZSUUlxZX2J9/3r8ucL7EHF1WXFpMqSmtsI+SshIOlxwun/dvGzgFli01pZSWlZbvp6E8DM2fTDzisfOeo/PhrFt+03KSvEkRjU0Th1LqGCJiLy/FxUN8rKOJLGMMZaasPKFUTi6BSaY284H788+XmbJj9h/4GhhP5VeDKS/jL+ef9++3zJRRRqX3zhQnkR9CSBOHUqpJERF7WYg40GHZakVbp5RSSoXF1cQhIiNFZIOI5IjIxCDrE0VklrN+iYh0DVj3a2f5BhE5L9R9KqWUcpdriUNE4oBpwPlAb+AqEeldqdh1wD5jjA/4M/C4s21vYAzQBxgJPCMicSHuUymllIvcrHGcCuQYYzYZY4qA14FRlcqMAmY687OBs8X2QRsFvG6MOWKM2QzkOPsLZZ9KKaVc5Gbi6Ah8F/A+11kWtIwxpgTIA1pXs20o+wRARG4UkaUisnTXrl11+BhKKaUCNdrGcWPMdGNMljEmq02bNrEORymlGg03E8c2qDDGQydnWdAyIuIFUoE91Wwbyj6VUkq5yM3E8SXQQ0S6iUgCtrF7bqUyc4FrnPnLgY+MfbLUXGCM0+uqG9AD+CLEfSqllHKRazcAGmNKROQ2YD72NpsXjTFrROQRYKkxZi7wAvCyiOQAe7GJAKfcG8BaoAS41RhTChBsnzXFsmzZst0isrWWHyUDOPZRd/WHxlc3Gl/daHx1U9/j6xJsYZN4dGxdiMjSYI9OrC80vrrR+OpG46ub+h5fVRpt47hSSil3aOJQSikVFk0cNZse6wBqoPHVjcZXNxpf3dT3+ILSNg6llFJh0RqHUkqpsGjiUEopFRZNHI66DAEfhdg6i8gCEVkrImtE5BdBygwXkTwRWelMD0UrPuf4W0
RktXPspUHWi4hMdc7fKhEZGMXYegacl5Uiki8id1YqE9XzJyIvisgPIvJ1wLJ0Efm3iHzrvKZVse01TplvReSaYGVciu+PIrLe+fnNEZFWVWxb7e+Ci/FNEpFtAT/DC6rY1vVHM1QR36yA2LaIyMoqtnX9/NWZMabJT9ibCTcCmUAC8BXQu1KZnwN/cebHALOiGF97YKAznwx8EyS+4cC/YngOtwAZ1ay/AHgPEGAIsCSGP+sdQJdYnj/gDGAg8HXAsieAic78RODxINulA5uc1zRnPi1K8Z0LeJ35x4PFF8rvgovxTQLuCeHnX+3fulvxVVr/J+ChWJ2/uk5a47DqMgS864wx240xy535AmAdVYwKXI+NAl4y1udAKxFpH4M4zgY2GmNqO5JARBhjFmJHSwgU+Ds2E/hxkE3PA/5tjNlrjNkH/Bv7zBrX4zPGfGDsKNYAn2PHiouJKs5fKKLyaIbq4nO+N64A/h7p40aLJg6rLkPAR5VziWwAsCTI6tNE5CsReU9E+kQ1MDDAByKyTERuDLI+5CHxXTaGqv9gY3n+AI4zxmx35ncAxwUpU1/O4wRsDTKYmn4X3HSbcyntxSou9dWH8zcM2GmM+baK9bE8fyHRxNGAiEhL4E3gTmNMfqXVy7GXX04GngLeinJ4Q40xA7FPZ7xVRM6I8vFr5AyMeQnwjyCrY33+KjD2mkW97CsvIr/BjiH3ahVFYvW78CzQHegPbMdeDqqPrqL62ka9/1vSxGHVZQj4qBCReGzSeNUY88/K640x+caYA878PCBeRDKiFZ8xZpvz+gMwB3tJIFB9GBL/fGC5MWZn5RWxPn+Onf7Ld87rD0HKxPQ8ish44CJgrJPcjhHC74IrjDE7jTGlxpgy4Lkqjhvr8+cFfgLMqqpMrM5fODRxWHUZAt51zjXRF4B1xpjJVZRp529zEZFTsT/bqCQ2EWkhIsn+eWwj6teVis0Fxjm9q4YAeQGXZaKlyv/0Ynn+AgT+jl0DvB2kzHzgXBFJcy7FnOssc52IjATuBS4xxhyqokwovwtuxRfYZnZpFceN9aMZzgHWG2Nyg62M5fkLS6xb5+vLhO318w22x8VvnGWPYP9IAJKwlzhysM8GyYxibEOxly1WASud6QLgZuBmp8xtwBpsL5HPgdOjGF+mc9yvnBj85y8wPgGmOed3NZAV5Z9vC2wiSA1YFrPzh01g24Fi7HX267BtZv8HfAt8CKQ7ZbOA5wO2neD8HuYA10Yxvhxs+4D/d9Dfy7ADMK+634Uoxfey87u1CpsM2leOz3l/zN96NOJzlv/N/zsXUDbq56+ukw45opRSKix6qUoppVRYNHEopZQKiyYOpZRSYdHEoZRSKiyaOJRSSoVFE4dStSQipZVG3Y3YSKsi0jVwZFWl6hNvrANQqgErNMb0j3UQSkWb1jiUijDneQpPOM9U+EJEfM7yriLykTMI3/+JyPHO8uOc51t85UynO7uKE5HnxD6D5QMRaeaUv0Pss1lWicjrMfqYqgnTxKFU7TWrdKnqyoB1ecaYk4CngSnOsqeAmcaYftgBAqc6y6cCnxg7wOJA7B3DAD2AacaYPsB+4DJn+URggLOfm936cEpVRe8cV6qWROSAMaZlkOVbgLOMMZucwSl3GGNai8hu7DAYxc7y7caYDBHZBXQyxhwJ2EdX7HM3ejjv7wPijTG/F5H3gQPYEXzfMs7gjEpFi9Y4lHKHqWI+HEcC5ks52iZ5IXbcr4HAl86Iq0pFjSYOpdxxZcDrf5z5z7CjsQKMBRY58/8H3AIgInEiklrVTkXEA3Q2xiwA7sMO739MrUcpN+l/KkrVXjMRWRnw/n1jjL9LbpqIrMLWGq5ylt0OzBCRXwG7gGud5b8ApovIddiaxS3YkVWDiQNecZKLAFONMfsj9omUCoG2cSgVYU4bR5YxZnesY1HKDXqpSimlVFi0xqGUUiosWuNQSikVFk0cSimlwqKJQymlVFg0cSillAqLJg
6llFJh+X9GClBX8oKJ3AAAAABJRU5ErkJggg==\n",
"text/plain": [
"