diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/README.md b/Course 2: Convolutional Neural Networks in Tensorflow/README.md index cbe0dd4..e095e60 100644 --- a/Course 2: Convolutional Neural Networks in Tensorflow/README.md +++ b/Course 2: Convolutional Neural Networks in Tensorflow/README.md @@ -55,6 +55,10 @@

+

+ +

+ - [Programming assignment](). ### Week 4 diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/Week 2/Exercise_2_Cats_vs_Dogs_using_augmentation_Question-FINAL.ipynb b/Course 2: Convolutional Neural Networks in Tensorflow/Week 2/Exercise_2_Cats_vs_Dogs_using_augmentation_Question-FINAL.ipynb new file mode 100644 index 0000000..2462b88 --- /dev/null +++ b/Course 2: Convolutional Neural Networks in Tensorflow/Week 2/Exercise_2_Cats_vs_Dogs_using_augmentation_Question-FINAL.ipynb @@ -0,0 +1,499 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "dn-6c02VmqiN" + }, + "outputs": [], + "source": [ + "# ATTENTION: Please do not alter any of the provided code in the exercise. Only add your own code where indicated\n", + "# ATTENTION: Please do not add or remove any cells in the exercise. The grader will check specific cells based on the cell position.\n", + "# ATTENTION: Please use the provided epoch values when training.\n", + "\n", + "# In this exercise you will train a CNN on the FULL Cats-v-dogs dataset\n", + "# This will require you doing a lot of data preprocessing because\n", + "# the dataset isn't split into training and validation for you\n", + "# This code block has all the required inputs\n", + "import os\n", + "import zipfile\n", + "import random\n", + "import shutil\n", + "import tensorflow as tf\n", + "from tensorflow.keras.optimizers import RMSprop\n", + "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", + "from tensorflow.keras.models import Sequential\n", + "from tensorflow.keras.layers import Flatten, Dense, Conv2D, MaxPooling2D\n", + "from tensorflow.keras.callbacks import Callback\n", + "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", + "from shutil import copyfile\n", + "from os import getcwd" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "3sd9dQWa23aj" + }, + "outputs": [], + "source": [ + "# This code block unzips the full Cats-v-Dogs dataset to /tmp\n", + "# which will create a tmp/PetImages directory containing subdirectories\n", + "# called 'Cat' and 'Dog' (that's how the original researchers structured it)\n", + "path_cats_and_dogs = f\"{getcwd()}/../tmp2/cats-and-dogs.zip\"\n", + "shutil.rmtree('/tmp')\n", + "\n", + "local_zip = path_cats_and_dogs\n", + "zip_ref = zipfile.ZipFile(local_zip, 'r')\n", + "zip_ref.extractall('/tmp')\n", + "zip_ref.close()" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "gi3yD62a6X3S" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1500\n", + "1500\n" + ] + } + ], + "source": [ + "print(len(os.listdir('/tmp/PetImages/Cat/')))\n", + "print(len(os.listdir('/tmp/PetImages/Dog/')))\n", + "\n", + "# Expected Output:\n", + "# 1500\n", + "# 1500" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "F-QkLjxpmyK2" + }, + "outputs": [], + "source": [ + "# Use os.mkdir to create your directories\n", + "# You will need a directory for cats-v-dogs, and subdirectories for training\n", + "# and testing. 
These in turn will need subdirectories for 'cats' and 'dogs'\n", + "try:\n", + " #YOUR CODE GOES HERE\n", + " main_dir = \"/tmp/cats-v-dogs/\"\n", + " \n", + " train_dir = os.path.join(main_dir, \"training\")\n", + " test_dir = os.path.join(main_dir, \"testing\")\n", + " \n", + " cats_train = os.path.join(train_dir, \"cats\")\n", + " dogs_train = os.path.join(train_dir, \"dogs\")\n", + " \n", + " cats_test = os.path.join(test_dir, \"cats\")\n", + " dogs_test = os.path.join(test_dir, \"dogs\")\n", + " \n", + " os.mkdir(main_dir)\n", + " \n", + " os.mkdir(train_dir)\n", + " os.mkdir(test_dir)\n", + " \n", + " os.mkdir(cats_train)\n", + " os.mkdir(dogs_train)\n", + " \n", + " os.mkdir(cats_test)\n", + " os.mkdir(dogs_test)\n", + "except OSError:\n", + " pass" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "zvSODo0f9LaU" + }, + "outputs": [], + "source": [ + "# Write a python function called split_data which takes\n", + "# a SOURCE directory containing the files\n", + "# a TRAINING directory that a portion of the files will be copied to\n", + "# a TESTING directory that a portion of the files will be copie to\n", + "# a SPLIT SIZE to determine the portion\n", + "# The files should also be randomized, so that the training set is a random\n", + "# X% of the files, and the test set is the remaining files\n", + "# SO, for example, if SOURCE is PetImages/Cat, and SPLIT SIZE is .9\n", + "# Then 90% of the images in PetImages/Cat will be copied to the TRAINING dir\n", + "# and 10% of the images will be copied to the TESTING dir\n", + "# Also -- All images should be checked, and if they have a zero file length,\n", + "# they will not be copied over\n", + "#\n", + "# os.listdir(DIRECTORY) gives you a listing of the contents of that directory\n", + "# os.path.getsize(PATH) gives you the size of the file\n", + "# copyfile(source, destination) copies a file from source to destination\n", + "# random.sample(list, len(list)) shuffles a list\n", + "def split_data(SOURCE, TRAINING, TESTING, SPLIT_SIZE):\n", + "# YOUR CODE STARTS HERE\n", + " data = os.listdir(SOURCE)\n", + " data = random.sample(data, len(data)) # shuffled\n", + " for count, file in enumerate(data):\n", + " if(count < SPLIT_SIZE * len(data)) and os.path.getsize(f\"{SOURCE}/{file}\")!=0:\n", + " copyfile(f\"{SOURCE}/{file}\", f\"{TRAINING}/{file}\")\n", + " elif (count >= SPLIT_SIZE * len(data)) and os.path.getsize(f\"{SOURCE}/{file}\")!=0:\n", + " copyfile(f\"{SOURCE}/{file}\", f\"{TESTING}/{file}\")\n", + "# YOUR CODE ENDS HERE\n", + "\n", + "\n", + "CAT_SOURCE_DIR = \"/tmp/PetImages/Cat/\"\n", + "TRAINING_CATS_DIR = \"/tmp/cats-v-dogs/training/cats/\"\n", + "TESTING_CATS_DIR = \"/tmp/cats-v-dogs/testing/cats/\"\n", + "DOG_SOURCE_DIR = \"/tmp/PetImages/Dog/\"\n", + "TRAINING_DOGS_DIR = \"/tmp/cats-v-dogs/training/dogs/\"\n", + "TESTING_DOGS_DIR = \"/tmp/cats-v-dogs/testing/dogs/\"\n", + "\n", + "split_size = .9\n", + "split_data(CAT_SOURCE_DIR, TRAINING_CATS_DIR, TESTING_CATS_DIR, split_size)\n", + "split_data(DOG_SOURCE_DIR, TRAINING_DOGS_DIR, TESTING_DOGS_DIR, split_size)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "luthalB76ufC" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1350\n", + "1350\n", + "150\n", + "150\n" + ] + } + ], + "source": [ + "print(len(os.listdir('/tmp/cats-v-dogs/training/cats/')))\n", + 
"print(len(os.listdir('/tmp/cats-v-dogs/training/dogs/')))\n", + "print(len(os.listdir('/tmp/cats-v-dogs/testing/cats/')))\n", + "print(len(os.listdir('/tmp/cats-v-dogs/testing/dogs/')))\n", + "\n", + "# Expected output:\n", + "# 1350\n", + "# 1350\n", + "# 150\n", + "# 150" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "-BQrav4anTmj" + }, + "outputs": [], + "source": [ + "# DEFINE A KERAS MODEL TO CLASSIFY CATS V DOGS\n", + "# USE AT LEAST 3 CONVOLUTION LAYERS\n", + "model = tf.keras.models.Sequential([\n", + "# YOUR CODE HERE\n", + " Conv2D(16, (3,3), activation = 'relu', input_shape = (150,150,3)),\n", + " MaxPooling2D(2,2),\n", + " Conv2D(32, (3,3), activation = 'relu'),\n", + " MaxPooling2D(2,2),\n", + " Conv2D(64, (3,3), activation = 'relu'),\n", + " MaxPooling2D(2,2),\n", + " Flatten(),\n", + " Dense(512, activation = 'relu'),\n", + " Dense(1, activation = 'sigmoid')\n", + "])\n", + "\n", + "model.compile(optimizer=RMSprop(lr=0.001), loss='binary_crossentropy', metrics=['acc'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# NOTE:\n", + "\n", + "In the cell below you **MUST** use a batch size of 10 (`batch_size=10`) for the `train_generator` and the `validation_generator`. Using a batch size greater than 10 will exceed memory limits on the Coursera platform." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "mlNjoJ5D61N6" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 2700 images belonging to 2 classes.\n", + "Found 300 images belonging to 2 classes.\n" + ] + } + ], + "source": [ + "TRAINING_DIR = train_dir#YOUR CODE HERE\n", + "train_datagen = ImageDataGenerator(\n", + " rescale = 1./255,\n", + " rotation_range = 40,\n", + " width_shift_range = 0.2,\n", + " height_shift_range = 0.2,\n", + " shear_range = 0.2,\n", + " horizontal_flip = True,\n", + " fill_mode = 'nearest'\n", + "\n", + ") #YOUR CODE HERE\n", + "\n", + "# NOTE: YOU MUST USE A BATCH SIZE OF 10 (batch_size=10) FOR THE \n", + "# TRAIN GENERATOR.\n", + "train_generator = train_datagen.flow_from_directory(\n", + " TRAINING_DIR,\n", + " target_size = (150, 150),\n", + " batch_size = 10,\n", + " class_mode = 'binary'\n", + " )#YOUR CODE HERE\n", + "\n", + "VALIDATION_DIR = test_dir #YOUR CODE HERE\n", + "validation_datagen = ImageDataGenerator(\n", + " rescale = 1./255,\n", + " rotation_range = 40,\n", + " width_shift_range = 0.2,\n", + " height_shift_range = 0.2,\n", + " shear_range = 0.2,\n", + " horizontal_flip = True,\n", + " fill_mode = 'nearest'\n", + ") #YOUR CODE HERE\n", + "\n", + "# NOTE: YOU MUST USE A BACTH SIZE OF 10 (batch_size=10) FOR THE \n", + "# VALIDATION GENERATOR.\n", + "validation_generator = validation_datagen.flow_from_directory(\n", + " VALIDATION_DIR,\n", + " target_size = (150, 150),\n", + " batch_size = 10,\n", + " class_mode = 'binary'\n", + " )#YOUR CODE HERE\n", + "\n", + "# Expected Output:\n", + "# Found 2700 images belonging to 2 classes.\n", + "# Found 300 images belonging to 2 classes." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "KyS4n53w7DxC" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/2\n", + "270/270 [==============================] - 60s 221ms/step - loss: 7.4806 - acc: 0.4978 - val_loss: 0.7257 - val_acc: 0.5000\n", + "Epoch 2/2\n", + "270/270 [==============================] - 55s 205ms/step - loss: 0.7067 - acc: 0.5252 - val_loss: 0.6813 - val_acc: 0.5067\n" + ] + } + ], + "source": [ + "history = model.fit_generator(train_generator,\n", + " epochs=2,\n", + " verbose=1,\n", + " validation_data=validation_generator)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "MWZrJN4-65RC" + }, + "outputs": [ + { + "data": { + "text/plain": [ + "Text(0.5, 1.0, 'Training and validation loss')" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAccAAAEICAYAAAAqQj/TAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAZBElEQVR4nO3deZQlZZ3m8e+TmSAgm1CIiEKp4EILbjW2G90ujCI6oCONgBu2qI0rbevouNI22nq07dPaDkozriiLKMpoqyCiKAJarAJuiIiAKLsIDVKVv/kjIqnLay63qrIyK6u+n3PuuXEj3hvxeyOz7lPxRtyMVBWSJGmFkfkuQJKktY3hKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR2kISUaT/DHJDrPZdj4l2SnJrH+XK8keSS4feP2zJLsP03YVtnVUkreu6vulqYzNdwHSmpDkjwMvNwHuAJb3r19ZVZ9bmfVV1XJg09luuz6oqofMxnqSHAy8sKqePLDug2dj3VLLcNQ6qaruCqf+yOTgqvrWVO2TjFXVsrmoTZqJv4/zz2FVrZeSHJ7kuCTHJLkFeGGSxyc5K8lNSX6b5MNJNujbjyWpJIv710f3y7+e5JYkZyZ5wMq27Zc/M8nPk9yc5CNJzkhy0BR1D1PjK5NcmuTGJB8eeO9okn9Ncn2Sy4A9p9k/b0tybDPvo0k+1E8fnOQnfX9+2R/VTbWuK5M8uZ/eJMln+9ouBh7TtH17ksv69V6cZO9+/q7AvwO790PW1w3s28MG3v93fd+vT/LlJNsNs29WZj9P1JPkW0luSHJNkv81sJ139PvkD0mWJrnvZEPYSb4/8XPu9+fp/XZuAN6eZOckp/XbuK7fb1sMvH/Hvo/X9sv/LclGfc0PG2i3XZLbkmw9VX/15wxHrc+eC3we2AI4DlgGvB5YBDyRLjxeOc37DwTeAWwFXAH808q2TXJv4HjgTf12fwU8dpr1DFPjXnSh8yi60N+jn38I8HTgEcB/A/abZjvHAM9Ocs++zjHgb+j2F8DvgGcBmwMvBz6SZLdp1jfh3cD9gQf2db6kWf7zvl9bAO8BPp9k26r6MfAa4HtVtWlVLWpXnOTp/fr3BbYHrgba4fOp9k1ryv3cB9S3gP8HbAc8GPhO/7439dvfE9gSOBi4fbodMuAJwE+AbYD3AwEOB+4D7EK3z97R1zAGfA24FFhMt0+Pr6rb6X6fXjiw3gOBb1bV9UPWIYCq8uFjnX4AlwN7NPMOB749w/veCHyhnx4DCljcvz4a+NhA272Bi1ah7d/SfeBPLAvwW+CgIfs2WY2PG1j+JeCN/fTpdMPLE8v26j4Cplz3WcCB/fQzgZ9N0/arwKv76T2AyweWXQk8uZ++YvBnAbxqsO0k670IeFY/fTDwnWb50cBh/fSngfcOLNuc7jzz/WbaNyu5n18E/GiKdr+cqLeZv1O7r4HvT/yc+75dNkMN+05sF9gduAYYnaTdE+n+k5X+9fnA/5ztf1fr+sMjR63PfjP4IslDk3ytHyb7A91RyJ8doQy4ZmD6Nqa/CGeqtvcdrKO6T7Mrp1rJkDUOtS3g19PUC91R4gH99IGsOGokybOTnN0P+d1Ed0Q63b6asN10NSQ5KMkF/dDgTcBDh1wvdP27a31V9QfgRrqjyAlD/cxm2M/3pwvByUy3bCbt7+N9khyf5Kq+hk81NVxe3cVfd1NVZ9Ad+T4pycOBHeiOMrUSDEetz9qvMXyc7khlp6raHHgn3ZHcmvRbuiMbAJKEu3+Yt1anxt/SfahOmOmrJscDeyTZHtiHPhyTbAycAPwzsG1VbQmcPGQd10xVQ5IHAkfQDf9u3a/3pwPrnelrJ1cDOw6sbzPgXsBVQ9TVmm4//wZ40BTvm2rZrX1NmwzMu0/Tpu3f++must61r+GgpoYdk4xOUcdn6IZWX0Q33HrHFO00BcNRWmEz4Gbg1v6ChunON86WrwKPTvI/+vNIr6c757QmajweODTJ9v3FGW+ernFVXUM39PcpuiHVX/SL7gFsCFwLLE/ybOBpK1HDW5Nsme57oK8ZWLYpXUBcS/f/hJfTHTlO+B1wv8ELYxrHAC9LsluSe9CF9/eqasoj8WlMt59PAnZI8pok90iyeZKJ88RHAYcneVA6j0yyFd1/Cq6hO885muQVDAT5NDXcCtyc5P50Q7sTzgSuB96b7iKnjZM8cWD5Z+mGYQ+kC0qtJMNRWuEf6C4QuYXuyOG4Nb3Bqvod8HzgQ3Qfdg8CzqM7YpjtGo8ATgV+DPyI7uhvJp+nO4d415BqVd0E/D1wInAD3YfwV4es4V10R7CXA19n4IO7qi4EPgL8sG/zEODsgfeeAvwC+F2SweHRifd/g27488T+/TsALxiyrtaU+7mqbgb+O/A8usD+OfDX/eIPAF+m289/AI4ENuqHy18OvBW4ju4c5GDfJvMuuouzbqYL5C8O1LAMeDbwMLqjyCvofg4Tyy+n+znf
UVU/WMm+ixUnbCWtBfphsquBfavqe/NdjxauJJ+hu8jnsPmuZSHyjwBI8yzJnnRXhv4X8L+BO+mOnqRV0p+/3QfYdb5rWagcVpXm35OAy+jOtT0DeK4XUGhVJfln4AK6r7VcMd/1LFQOq0qS1PDIUZKkhucc1xGLFi2qxYsXz3cZkrRgnHPOOddV1aRfnTIc1xGLFy9m6dKl812GJC0YSab8K1EOq0qS1DAcJUlqGI6SJDUMR0mSGoajJEmNacMxyWlJntHMOzTJETO874/9832TTPrHjZN8J8mSGdZz6OAtXpL8Z5Itp3vPykhyfpJjZ2t9kqR1w0xHjscA+zfz9u/nz6iqrq6qfWduOaVDgbvCsar26u8IsNr629CMArsnuedsrHOK7fh1GUlaYGYKxxOAZyXZECDJYrq7bX8vyaZJTk1ybpIfJ9mnfXOSxUku6qc3TnJskp8kORHYeKDdEUmWJrk4yT/2817Xb+u0JKf18y5PsqiffkOSi/rHoQPb+0mS/+jXdXJ/Y9bJHEB3z7OT6f5A70QtOyX5Vn838nOTPKif/+a+nxckeV8/766j3ySLklzeTx+U5KQk3wZOnW5fJXlxkgv79X42yWZJfjVxz7r+XnF3vZYkrXnTHtVU1Q1Jfgg8E/gK3VHj8VVVSW6n+wPJf+gD66wkJ9XUf6z1EOC2qnpYkt2AcweWva3f1ihdmOxWVR9O8gbgKVV13eCKkjwGeCnwl3R3xj47yXeBG4GdgQOq6uVJjqe759rRk9TzfLp7sj0UeC0r7lf3OeB9VXViko2AkSTPpAvQv6yq2/qbl87k0cBufb/GJttXwC7A24EnVNV1SbaqqluSfAd4Ft194fYHvlRVd7Yb6G+Y+gqAHXaY6abukqRhDXNBzuDQ6uCQaujuQn0h8C1ge2DbadbzV/Qh1d/U9MKBZfslOZfuJq9/QRca03kScGJV3VpVfwS+BOzeL/tVVZ3fT58DLG7f3B/tXdf/xfpTgUcl2SrJZsD2VXViX+ftVXUb3c1eP9lPU1U3zFAfwCkD7abaV08FvjAR/gPtj6ILf/rnT062gao6sqqWVNWSbbaZ7ubxkqSVMUw4fgV4WpJHA5tU1Tn9/BcA2wCPqapH0t0Re6OVLSDJA4A3Ak+rqt2Ar63KegYM3upnOZMfHR8APLQfBv0lsDndEebKWsaKfdjWfOvA9Ertq6o6A1ic5MnAaFVdtAq1SZJW0Yzh2B+ZnQZ8grtfiLMF8PuqujPJU4AdZ1jV6cCBAEkeDuzWz9+cLkhuTrIt3RDuhFuAzSZZ1/eA5yTZpL+Y5rn9vBklGQH2A3atqsVVtZhuyPSAqroFuDLJc/q29+ivlj0FeOnElbMDw6qXA4/pp6e78GiqffVt4G+SbN2sF+AzdEO9kx41SpLWnGG/53gM8AjuHo6fA5Yk+THwYuCnM6zjCGDTJD8B3k035ElVXUA3nPpTujA4Y+A9RwLfmLggZ0JVnQt8iu5u6WcDR1XVeUP2ZXfgqqq6emDe6cAuSbYDXgS8rh8C/QFwn6r6BnASsDTJ+XRHugAfBA5Jch6waJptTrqvqupi4D3Ad5NcAHyoec+9GPLKYEnS7PFmx2upJPsC+1TVi4Zpv2TJkvKuHJI0vCTnVNWk37f3O3hroSQfoRte3mu+a5Gk9ZHhuBaqqtfOdw2StD7zb6tKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJDcNRkqSG4ShJUsNwlCSpYThKktQwHCVJahiOkiQ1DEdJkhqGoyRJjbH5LkCStEBUwfLlcOedsGzZ9M9z1WbTTeGII2a9q4ajJM2Wqpk/7Oc6PIZtM2zbuTYyAmNjsMEG3WNieuL53vdeI5s1HCXNnarZ/7Bem9osXz73+3R09M8DY7IQaZ832qg76pquzTDrWdNtRubn7J/hKK1NxsfXzg/92WozPj73+3RsbNU+rDfZZP6DYaY2o6PzFh7rOsNRC0fV/IbHsmXUn+6k7ly24rFs+d1ejy8b//NlzfP4svG7z5t43LmMAoowzghFZnwM027GNqMbMD66ATW2ATU6tuL5rumNqdGxrs3E/MHHhmPURqPU2Bg10rcbGV2xfGJ64Hl8pJl/1/QI1S8bz+iKZZM9MnL3Nhnpp0eo9M+MMF6hihkf4+Or2OZPUHfMwnpms6Y11GZtrGnbbeHqq2f/48ZwXM8d+PTruP32osaLWt4/T/IYv9truufqp2uiDSte10SbiXkr5o+PZ0Wbqf5RVKhKFxYT86f90B+lGKPYePbD467HOvo/9OX940/zXcj8GRmBZPrHXLaZ7e2Njq59Nc3WftpsszXzO2E4rud+ceqvuX18wxkiYXz62EgYSTF6t1/mkJH+Odw1PTICGRtYPpJ+esXrkYHpFY+Rft5I12Z0YNlov3x0oO1oGBkdufuy0ZHuMdI/9/NGRmvFstGsaDs22te3dn4oWNPs1CRNxnBcz/3olJu7T4hVOQcyjyfLJWlNMhzXd0996nxXIElrHf/bL0lSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1DAcJUlqGI6SJDUMR0mSGoajJEkNw1GSpIbhKEl
Sw3CUJKlhOEqS1DAcJUlqGI6SJDVWOxyTbJ3k/P5xTZKrBl5vOOQ6PpnkITO0eXWSF6xuvQPr2zbJsiQHz9Y6JUnrhrHVXUFVXQ88EiDJYcAfq+qDg22SBEhVjU+xjpcOsZ2Prm6tjf2AM4EDgKNmed13STJWVcvW1PolSbNvjQ2rJtkpySVJPgdcDGyX5MgkS5NcnOSdA22/n+SRScaS3JTkfUkuSHJmknv3bQ5PcuhA+/cl+WGSnyV5Qj//nkm+2G/3hH5bj5yixAOAQ4EHJtluoJZnJTm33/7J/bzNknw6yYX94zkTtQ68b/8kR/XTRyc5IskPgfcmeVzfl/OSnJFk577dWJJ/TXJRv95XJXl6khMG1vvMJF+YjZ+JJGk4q33kOIOHAi+uqqUASd5SVTckGQNOS3JCVV3SvGcL4LtV9ZYkHwL+FnjfJOtOVT02yd7AO4E9gdcC11TV85I8Ajh3sqKSLAa2qqpz+uDZD/i3JPcBjgB2r6pfJ9mqf8thwLVVtVt/FLzlEH3fDnhcVY0n2aJf57IkewKHA88HDgHuCzyiqpb327sJ+PckW/dH5S8FPjFFP14BvAJghx12GKIkSdIw1vQFOb+cCMbeAUnOpQuthwG7TPKe/6qqr/fT5wCLp1j3lyZp8yTgWICquoDuiHUy+wPH9dPH0h1FAjweOK2qft2v44Z+/h7AR/t5VVU3TrHeQV8YGEbeEvhikouADwJ/MbDej1XV8ont9e/5HHBgH5aPAU6ebANVdWRVLamqJdtss80QJUmShrGmjxxvnZjohxJfDzy2qm5KcjSw0STv+dPA9HKmrvGOIdpM5QBgUZKX9K/vm+SBK7mOcSADr9u+3Dow/R7gm1X1f5LsBHxjhnV/AvhiP33cRHhKkubGXH6VY3PgFuAP/Tm+Z6yBbZxBN0RKkl2Z5Mg0yS7AWFVtX1WLq2ox8AG6o8kfAE9JsmPfdmJY9RTg1f28JLlXf4R3Y5Kdk4wAz52mri2Aq/rpgwbmnwL8XZLRwe1V1W+A64C3AJ9amR0gSVp9cxmO5wKXAD8FPkMXZLPtI8D2SS4B3tVv7+amzQHAic28LwIHVNXv6M4DfiXJBXTDmwD/CGzbD4ueD+zez38z8E26UL1ymrreD3ygH1IePNr8OHANcGG/vf0Gln0e+FVV/Xz6LkuSZluqar5rmDX9hT5jVXV7P4x7MrDzQvwqRZKPAWdW1aeHab9kyZJaunTpzA0lSQAkOaeqlky2bE2fc5xrmwKn9iEZ4JULNBjPB24EXjfftUjS+midCsequonu6s4Fraqm+m6mJGkO+LdVJUlqGI6SJDXWqQty1mdJrgV+vYpvX0T31ZH1iX1e961v/QX7vLJ2rKpJ/4KK4SiSLJ3qiq11lX1e961v/QX7PJscVpUkqWE4SpLUMBwFcOR8FzAP7PO6b33rL9jnWeM5R0mSGh45SpLUMBwlSWoYjuuRJHsm+VmSS5O8ZZLl90hyXL/87CSL577K2TNEf9+Q5JIkFyY5deJWZQvZTH0eaPe8JJVkwV/2P0yfk+zX/6wvTvL5ua5xtg3xu71DktOSnNf/fu81H3XOliSfSPL7/s5Iky1Pkg/3++PCJI9e7Y1WlY/14AGMAr8EHghsCFwA7NK0eRXwsX56f7obLc977Wuwv08BNumnD1nI/R22z327zYDTgbOAJfNd9xz8nHcGzgPu1b++93zXPQd9PhI4pJ/eBbh8vutezT7/FfBo4KIplu8FfJ3uhhOPA85e3W165Lj+eCxwaVVdVlV/Ao4F9mna7ANM3CLrBOBpScLCNGN/q+q0qrqtf3kWcL85rnG2DfMzBvgnunuM3j6Xxa0hw/T55cBHq+pGgKr6/RzXONuG6XPR3WAeuputXz2H9c26qjoduGGaJvsAn6nOWcCWSbZbnW0ajuuP7YHfDLy+sp83aZvqbvV1M7D1nFQ3+4bp76CX0f3PcyGbsc/9cNP9q+prc1nYGjTMz/nBwIOTnJHkrCR7zll1a8YwfT4MeGGSK4H/BF47N6XNm5X99z6jdeqWVdKqSPJCYAnw1/Ndy5qUZAT4EHDQPJcy18bohlafTDc6cHqSXau7xd266gDgU1X1L0keD3w2ycOrany+C1soPHJcf1wF3H/g9f36eZO26W8YvQVw/ZxUN/uG6S9J9gDeBuxdVXfMUW1rykx93gx4OPCdJJfTnZs5aYFflDPMz/lK4KSqurOqfgX8nC4sF6ph+vwy4HiAqjoT2IjuD3Svq4b6974yDMf1x4+AnZM8IMmGdBfcnNS0OQl4ST+9L/Dt6s92L0Az9jfJo4CP0wXjQj8PBTP0uapurqpFVbW4qhbTnWfdu6qWzk+5s2KY3+sv0x01kmQR3TDrZXNZ5Cwbps9XAE8DSPIwunC8dk6rnFsnAS/ur1p9HHBzVf12dVbosOp6oqqWJXkN8E26q90+UVUXJ3k3sLSqTgL+L93wy6V0J7/3n7+KV8+Q/f0AsCnwhf66oyuqau95K3o1DdnndcqQff4m8PQklwDLgTdV1UIdERm2z/8A/EeSv6e7OOegBfwfXZIcQ/cfnEX9edR3ARsAVNXH6M6r7gVcCtwGvHS1t7mA95ckSWuEw6qSJDUMR0mSGoajJEkNw1GSpIbhKElSw3CUJKlhOEqS1Pj/k/SPHJXDBjsAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAa8AAAEICAYAAADocntXAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3dd5hcddnG8e8dkkBCFwLSQzCVJLSlCUgLEKkvgvReFJASqkgvUelVRCLSpddIFwjSAwspmAaIQQGB0EJHyvP+8TvrTsJudpKdnZkzc3+uay6mnDnznF3g3nPmzD2KCMzMzPKkU6UHMDMzm10OLzMzyx2Hl5mZ5Y7Dy8zMcsfhZWZmuePwMjOz3HF4mQGS5pL0iaRlS7lsJUn6gaSSfxZG0hBJUwtuT5G0XjHLzsFrXS7puDl9/izWO1zSVaVer5VP50oPYDYnJH1ScLM78CXwTXb75xHx59lZX0R8A8xX6mXrQUT0LcV6JO0H7BYRGxSse79SrNtqj8PLciki/hce2V/2+0XEQ60tL6lzRHxdjtnMrOP5sKHVpOyw0E2SbpD0MbCbpLUlPSPpQ0n/kXSRpC7Z8p0lhaSe2e3rssfvk/SxpKclLT+7y2aP/1jSS5KmS7pY0pOS9mpl7mJm/LmkVyR9IOmigufOJel8Se9JehUYOoufz/GSbpzpvksknZdd30/SpGx7/pHtFbW2rtclbZBd7y7p2my2CcBqMy17gqRXs/VOkLR1dv8g4HfAetkh2XcLfranFDz/gGzb35N0p6QlivnZtEXSttk8H0p6RFLfgseOk/SmpI8kTS7Y1rUkvZDd/7aks4t9PSuBiPDFl1xfgKnAkJnuGw78F9iK9EdaN2B1YE3SEYdewEvAwdnynYEAema3rwPeBRqALsBNwHVzsOxiwMfANtljRwBfAXu1si3FzHgXsCDQE3i/aduBg4EJwNLAIsBj6T/xFl+nF/AJMG/But8BGrLbW2XLCNgI+BwYnD02BJhasK7XgQ2y6+cAjwILA8sBE2dadgdgiex3sks2w+LZY/sBj84053XAKdn1TbMZVwbmAX4PPFLMz6aF7R8OXJVd75/NsVH2OzoOmJJdXxF4Dfh+tuzyQK/s+nPAztn1+YE1K/3fQj1dvOdlteyJiPhLRHwbEZ9HxHMRMToivo6IV4ERwPqzeP6tEdEYEV8Bfyb9T3N2l90SGBsRd2WPnU8KuhYVOeNvI2J6REwlBUXTa+0AnB8Rr0fEe8AZs3idV4G/k0IVYBPgg4hozB7/S0S8GskjwMNAiydlzGQHYHhEfBARr5H2pgpf9+aI+E/2O7me9IdHQxHrBdgVuDwixkbEF8CxwPqSli5YprWfzazsBIyMiEey39EZpABcE/iaFJQrZoee/5n97CD9EdJb0iIR8XFEjC5yO6wEHF5Wy/5deENSP0n3SHpL0kfAacCis3j+WwXXP2PWJ2m0tuyShXNERJD2VFpU5IxFvRZpj2FWrgd2zq7vkt1ummNLSaMlvS/pQ9Jez6x+Vk2WmNUMkvaSNC47PPch0K/I9ULavv+tLyI+Aj4AlipYZnZ+Z62t91vS72ipiJgCHEn6PbyTHYb+frbo3sAAYIqkZyVtXuR2WAk4vKyWzXya+GWkvY0fRMQCwEmkw2Id6T+kw3gASBIz/s92Zu2Z8T/AMgW32zqV/2ZgiKSlSHtg12czdgNuBX5LOqS3EPBgkXO81doMknoBlwIHAotk651csN62Tut/k3Qosml985MOT75RxFyzs95OpN/ZGwARcV1ErEM6ZDgX6edCREyJiJ1Ih4bPBW6TNE87Z7EiObysnswPTAc+ldQf+HkZXvNuYFVJW0nqDBwG9OigGW8GhklaStIiwC9ntXBEvAU8AVwFTImIl7OH5ga6AtOAbyRtCWw8GzMcJ2khpc/BHVzw2HykgJpGyvH9SXteTd4Glm46QaUFNwD7ShosaW5SiDweEa3uyc7GzFtL2iB77aNJ71OOltRf0obZ632eXb4lbcDukhbN9tSmZ9v2bTtnsSI5vKyeHAnsSfof02WkEys6VES8DewInAe8B6wAjCF9Lq3UM15Kem/qRdLJBLcW8ZzrSSdg/O+QYUR8CBwO3EE66WF7UggX42TSHuBU4D7gmoL1jgcuBp7NlukLFL5P9FfgZeBtSYWH/5qefz/p8N0d2fOXJb0P1i4RMYH0M7+UFKxDga2z97/mBs4ivU/5FmlP7/jsqZsDk5TOZj0H2DEi/tveeaw4SofgzawcJM1FOky1fUQ8Xul5zPLKe15mHUzS0Oww2tzAiaSz1J6t8FhmuebwMut46wKvkg5JbQZsGxGtHTY0syL4sKGZmeWO97zMzCx3XMxbJosuumj07Nmz0mOYmeXK888//25EfOfjJQ6vMunZsyeNjY2VHsPMLFcktdgU48OGZmaWOw4vMzPLHYeXmZnljsPLzMxyx+FlZma54/AyM7PccXiZmVnuOLyq3SWXwH33VXoKM7Oq4vCqZl9/DSNGwOabw1ZbwSuvVHoiM7Oq4PCqZp07w3PPwZlnwqOPwoorwvHHw6efVnoyM7OKcnhVu65d4ZhjYMoU2HFH+M1voG9fuPFG8DcCmFmdcnjlxZJLwjXXwBNPwGKLwc47wwYbwLhxlZ7MzKzsHF55s8466VDiZZfBhAmw6qrwi1/A++9XejIzs7JxeOXRXHPBz34GL70EBx0Ef/gD9OmTAu2bbyo9nZlZh3N45dn3vgcXXwxjxsDAgXDAAbD66vDkk5WezMysQzm8asHgwTBqVDqJY9o0WHdd2G03ePPNSk9mZtYhHF61QkpnI06enE6nv+WWdCjxzDPhyy8rPZ2ZWUk5vGrNvPPC8OEwcSJsvDEceywMGuSWDjOrKQ6vWrXCCnDXXSm0JLd0mFlNcXjVuqFD4cUX4ayzmls6jjsOPvmk0pOZmc0xh1c96NoVjj46nVq/447w299Cv35www1u6TCzXHJ41ZMllkgtHU8+CYsvDrvsAuuv75YOM8sdh1c9+uEP4dln04eaJ050S4eZ5Y7Dq141tXS8/HJzS0fv3umfbukwsyrn8Kp3Cy/c3NIxaBAceCA0NKQCYDOzKuXwsqSppeOmm+Ddd2G99VJLxxtvVHoyM7PvcHhZMwl22GHGlo6+fd3SYWZVx+Fl31XY0jFkSHNLx733VnoyMzPA4WWzssIKcOedzS0dW2wBW27plg4zqziHl7WtqaXj7LPhb39zS4eZVZzDy4rTtSscdVRq6dhpJ7d0mFlFObxs9iyxBFx9tVs6zKyiHF42Z5paOkaMgEmTUkvHQQfBe+9VejIzqwMOL5tzc80F+++fDiX+4hepbqpPH7d0mFmHc3hZ+y28MFx0EYwdmz7s7JYOM+tgDi8rnUGD4JFHZmzp2HVX
t3SYWck5vKy0Cls6TjgBbrsttXSccYZbOsysZBxe1jHmnRdOP725peNXv3JLh5mVjMPLOlavXqml4/77oVMnt3SYWUk4vKw8NtsMxo+fsaXjV79yS4eZzRGHl5XPzC0dZ5yR3g+7/nq3dJjZbHF4Wfk1tXQ89VS6vuuu8KMfpVPtzcyK4PCyyll7bRg9Gv74x3R24mqruaXDzIri8LLKmmsu2G+/5paOESNSS8ell7qlw8xa5fCy6tDU0jFmTGrpOOigtCf2+OOVnszMqpDDy6pLU0vHzTfD+++n98J22cUtHWY2A4eXVR8JfvrT1FZ/4olw++1u6TCzGTi8rHrNOy+cdlpq6dhkk/S5sIED4Z57Kj2ZmVWYw8uqX69ecMcd8MAD6QSPLbdMl5dfrvRkZlYhDi/Lj003TS0d55wDjz2WWjqOPdYtHWZ1yOFl+dK1Kxx5JEyZkk7kOPNMt3SY1SGHl+XTEkvAVVe5pcOsTjm8LN/WXhuefXbGlo4DD3RLh1mNc3hZ/nXq1NzScfDBKch694bf/94tHWY1yuFltWPhheHCC1NLx8orp7qp1VZLJ3eYWU1xeFntGTQIHn64uaVj/fXTyR2vv17pycysRBxeVpuaWjomT25u6ejXD377W7d0mNUAh5fVtu7dU0vHpEmppeO449Lnw+6+u9KTmVk7OLysPiy/fHNLR5cusNVWsMUW6SQPM8sdh5fVl003hXHjUkvH44+nrsRjj4WPP670ZGY2GxxeVn+aWjpeeqm5paNfP/jzn93SYZYTDi+rX9//fmrpePppWHJJ2G03WG+9dKq9mVU1h5fZWmvB6NFw+eVpb8wtHWZVz+FlBqmlY999U3gdcsiMLR1ff13p6cxsJg4vs0ILLZRaOsaObW7paGhwS4dZlXF4mbVk4MDU0nHLLfDBB27pMKsyDi+z1kiw/fbpA84nnZRaOvr2dUuHWRVweJm1pXt3OPXUFGKbbeaWDrMq4PAyK9byy6e9rwcfdEuHWYU5vMxm1yabwPjxcO65zS0dv/ylWzrMysjhZTYnunSBI45Ie1277gpnnZXeD3NLh1lZOLzM2uP734crr0wtHUst5ZYOszJxeJmVQkstHQccAO++W+nJzGqSw8usVApbOg49NAVZnz5wySVu6TArMYeXWakttBBccEH66pVVVoGDD057Ym7pMCsZh5dZR1lxRXjoIbj1Vvjww9TSsfPObukwKwGHl1lHkmC77ZpbOu64I52V+JvfwBdfVHo6s9xyeJmVQ2FLx9ChcPzx6fNhf/mLT603mwMOL7NyWn55uO221NLRtStsvbVbOszmgMPLrBI22SSd0HHeefDkk27pMJtNDi+zSunSBQ4/HKZMmbGl47rrfCjRrA0OL7NKa2rpeOYZWHpp2H13WHddeOGFSk9mVrUcXmbVYs01U4D96U/w8svpG5x//nO3dJi1wOFlVk06dYJ99kkncBx2WAoyt3SYfYfDy6waLbQQnH/+d1s6/va3Sk9mVhUcXmbVbOaWjg02SC0d//53pSczqyiHl1m1K2zpOPlkuPNO6NcPfv1rt3RY3XJ4meVF9+5wyinNLR0nnJD2zNzSYXXI4WWWNz17Nrd0zD13aunYfPP0eTGzOuHwMsurwpaOp56CQYPgmGPc0mF1weFllmdNLR0vvQS77QZnn51Orb/2Wh9KtJrm8DKrBYsvDldckT7kvMwysMcebumwmubwMqslbumwOuHwMqs1LbV09O4Nv/udWzqsZji8zGpVU0vH+PGpneOQQ2DVVd3SYTXB4WVW6wYMgL/+NZ1e/9FHqaVjp53c0mG55vAyqwcS/OQnMHFiaum46y63dFiuObzM6klrLR0jR/rUessVh5dZPWpq6fjrX1NLxzbbuKXDcsXhZVbPhgxJLR3nn59aOgYOhKOPTu+NmVUxh5dZvevSBYYNS6fW7747nHMO9O2bWjq+/bbS05m1yOFlZklTS8fo0bDssm7psKrm8DKzGa2xBjz9dAqyf/yjuaVj2rRKT2b2Pw4vM/uuTp1g773TocRhw1JLR58+cPHFbumwquDwMrPWLbhg+sqVppaOQw9NLR2PPlrpyazOObzMrG0zt3RsuCHsuKNbOqxiHF5mVpymlo5Jk9IHnUeOTGclDh/ulg4rO4eXmc2ebt1SxdSkSemDzSeemPbM7rrLLR1WNg4vM5szPXvCrbemw4ndusH//R/8+McweXKlJ7M64PAys/YZMgTGjk0tHU8/DYMGuaXDOpzDy8zar6ml4+WX04ebm1o6rrnGLR3WIRxeZlY6iy2WPhPW1NKx556ppeP55ys9mdUYh5eZlV5TS8eVV6aWjtVXh5/9zC0dVjIOLzPrGJ06wV57Nbd0XHmlWzqsZBxeZtaxmlo6xo1LPYmHHgqrrOKWDmsXh5eZlceAAfDgg3D77fDJJ80tHf/6V6UnsxxyeJlZ+Uiw7bYwcSKcempq6ejXzy0dNtscXmZWft26wUknpQ80u6XD5oDDy8wqZ7nlUkvHQw+5pcNmi8PLzCpv441TS8cFF8Azz6SWjqOOckuHtcrhZWbVoUsXOOywdGr9nnumMxTd0mGtcHiZWXVZbDG4/PLU0rHccm7psBY5vMysOq2+Ojz1lFs6rEUOLzOrXoUtHYcfnoKsd2+46CK3dNQ5h5eZVb8FF4Rzz4Xx49Me2GGHpZaOUaMqPZlViMPLzPKjf/8ZWzo22gh22MEtHXXI4WVm+TJzS8df/pJaOk4/3S0ddcThZWb5VNjSscUW6fqAAXDnnW7pqAMOLzPLt+WWg1tugYcfhu7d017Z0KFu6ahxDi8zqw0bbQRjxsCFF6bPiLmlo6Y5vMysdnTpkr4vrLClo08fuPpqt3TUGIeXmdWewpaOnj3TZ8XWWQcaGys9mZWIw8vMaldTS8dVV8E//wlrrAH77w/vvFPpyaydHF5mVts6dUqHEKdMgSOOSEHWp49bOnLO4WVm9WHBBeGcc1JLxxprpJaOlVd2S0dOObzMrL707w8PPAB33AGffuqWjpxyeJlZ/ZHStzZPnAinnQZ3351aOk47DT7/vNLTWREcXmZWv7p1gxNPhEmTYMst4eST3dKREw4vM7PlloObb04tHfPO65aOHHB4mZk12WgjGDvWLR054PAyMyvUuXNzS8deezW3dFx1lVs6qojDy8ysJYstBn/8Y9oDW3552Htvt3RUEYeXmdmsrL46PPnkjC0d++3nlo4Kc3iZmbWlqaXjpZdSS8fVV6dDiRdeCF99Venp6pLDy8ysWAssMGNLx7BhsMoq8MgjlZ6s7ji8zMxm18wtHRtvDD/9Kbz2WqUnqxsOLzOzOTFzS8c996RQc0tHWTi8zMzao6mlY/LkGVs67rjDLR0dyOFlZlYKyy6bWjoeeQTmmw9+8hPYbLNUPWUl5/AyMyulDTeEMWPS94U99xwMHgxHHgnTp1d6spri8DIzK7XOneGQQ5pbOs4/H/r2dUtHCTm8zMw6So8eqaXj2WebWzp++MO0R2bt4vAyM+toDQ2ppePqq2HqVFhzTbd0tJPDy8ysHDp
1gj32SIcSjzzSLR3t5PAyMyunBRaAs8+GF19Me2Bu6ZgjDi8zs0ro1w/uvz99a/Nnn6WWju23d0tHkRxeZmaVIsE226SWjtNPh3vvTaF26qlu6WiDw8vMrNLmmQdOOCG1dGy9NZxyils62uDwMjOrFssuCzfdNGNLx6abuqWjBQ4vM7NqU9jS0diYWjqOOMItHQUcXmZm1aiwpWPvveGCC9Kp9Vde6ZYOHF5mZtWtRw8YMSK1dPTqBfvs45YOHF5mZvlQ2NLx2mvpm5z33RfefrvSk1WEw8vMLC+aWjqmTIGjjoJrrkmHEi+4oO5aOhxeZmZ5U9jSsfbacPjhsPLK8PDDlZ6sbBxeZmZ51a8f3Hcf3HUXfPEFDBlSNy0dDi8zszyT0gebJ0yA4cPrpqXD4WVmVgvmmQeOPz69H9bU0tG/P9x+e022dDi8zMxqyTLLpJaOUaPSe2PbbZdaOiZOrPRkJeXwMjOrRRtsAC+8ABdfnFo6Vlqpplo6HF5mZrWqc2c4+ODU0rHPPjXV0uHwMjOrdT16wGWXpVaOFVZIQbb22qm1I6ccXmZm9WK11eCJJ9KHm//1r/RNzjlt6XB4mZnVk06dYPfd06HEo4+Ga69NhxLPPz9XLR0OLzOzejT//HDWWc0tHUcckauWDoeXmVk969v3uy0d220HU6dWerJZmmV4SRolabOZ7hsm6dI2nvdJ9s8lJd3ayjKPSmpoYz3DJHUvuH2vpIVm9ZxiSDpF0lHtXY+ZWU2YuaXj/vvTB5yruKWjrT2vG4CdZrpvp+z+NkXEmxGx/ZwMlhkG/C+8ImLziPiwHeszM7PWNLV0TJ4M22zT3NJx221V19LRVnjdCmwhqSuApJ7AksDjkuaT9LCkFyS9KGmbmZ8sqaekv2fXu0m6UdIkSXcA3QqWu1RSo6QJkk7N7js0e61RkkZl902VtGh2/QhJf88uwwpeb5KkP2brelBSN4rUyjrnlXSPpHHZ/Ttm958haaKk8ZLOKfY1zMyq3jLLwI03wqOPppaO7beHTTapqpaOWYZXRLwPPAv8OLtrJ+DmiAjgC2DbiFgV2BA4V5JmsboDgc8ioj9wMrBawWPHR0QDMBhYX9LgiLgIeBPYMCI2LFyRpNWAvYE1gbWA/SWtkj3cG7gkIlYEPgS2m+VPoO11DgXejIiVImIgcL+kRYBtgRUjYjAwvJV1/iwL5cZp06YVM4aZWfVYf/3mlo7nn4fBg9PXr1RBS0cxJ2wUHjosPGQo4DeSxgMPAUsBi89iPT8CrgOIiPHA+ILHdpD0AjAGWBEY0MZM6wJ3RMSnEfEJcDuwXvbYPyNibHb9eaBnG+tqa50vAptIOlPSehExHZhOCu8/SfoJ8FlLK4yIERHREBENPXr0KHIMM7Mq0tTS8fLL6TNhF16YTq2/4oqKtnQUE153ARtLWhXoHhHPZ/fvCvQAVouIlYG3gXlmdwBJywNHARtnezH3zMl6CnxZcP0boHM71kVEvASsSgqx4ZJOioivgTVIh1W3BO5vz2uYmVW9RRdNLR2NjfCDH6QgW2utirV0tBle2V7IKOAKZjxRY0HgnYj4StKGwHJtrOoxYBcASQNJhwgBFgA+BaZLWpzmQ5QAHwPzt7Cux4H/k9Rd0rykQ3iPt7UtbWhxnZKWJB3uvA44G1hV0nzAghFxL3A4sFI7X9vMLB9WXTW1dFx7Lbz+emrp2Gefsrd0FPs5rxtI/4MuDK8/Aw2SXgT2ACa3sY5LgfkkTQJOIx3SIyLGkQ4XTgauB54seM4I0ntMowpXFBEvAFeR3o8bDVweEWOK3JYmJ0h6vekyi3UOAp6VNJb0Xt1wUqDenR0yfQI4YjZf28wsvyTYbbf03WFHHw3XXVf2lg5FlZ3+WKsaGhqisbGx0mOYmZXelCkwbFjz58Muuih92LkEJD2fndA3AzdsVLmvv06Xb75J7436bw0zqzp9+8K998LIkfDll+m0+g5u6fCeV5nM6Z7XgAEwaVLLj0n1c6m37fXPrnou1f7zqzpffAHnnQe//nX6i/uXv4RjjoHu3dt+bgta2/NyeJXJnIbXH/4A06alPa56vkDlZ6iWi9nMKh2gLV6++QqmTUOffsq4f3+PuZdcZA63reXwatdp5NbxDjig0hNYNXLwl+8PhUrPkNcLdCFiSeLzL+jUoz2ffmqZw8ssh6r2kJHZd5Q+uMAnbJiZWQ45vMzMLHccXmZmljsOLzMzyx2Hl5mZ5Y7Dy8zMcsfhZWZmuePwMjOz3HF4mZlZ7ji8zMwsdxxeZmaWOw4vMzPLHYeXmZnlTrvDS9IiksZml7ckvVFwu2uR67hSUt82lvmFpF3bO2+2rickrVyKdZmZWfm1+ytRIuI9YGUASacAn0TEOYXLSBLpiy+/bWUdexfxOpe0d1YzM6sNHXbYUNIPJE2U9GdgArCEpBGSGiVNkHRSwbJPSFpZUmdJH0o6Q9I4SU9LWixbZrikYQXLnyHpWUlTJP0wu39eSbdlr3tr9lpF7WFJ6ibpakkvSnpB0o+y+wdJei7bkxwvqZek+SXdl834d0nbl/rnZ2Zmrevo97z6AedHxICIeAM4Nvs655WATSQNaOE5CwJ/i4iVgKeBfVpZtyJiDeBooCkIDwHeiogBwOnAKrMx66HAlxExCNgduDY77HkQcE5ErAysDrwJbA5MjYiVImIg8NcWB5R+lgVo47Rp02ZjFDMzm5WODq9/RERjwe2dJb0AvAD0B1oKr88j4r7s+vNAz1bWfXsLy6wL3AgQEeNIe3zFWhe4LnvuBFJI/QB4CjhB0jHAMhHxBTAeGJrt/a0TEdNbWmFEjIiIhoho6NGjx2yMYmZms9LR4fVp0xVJvYHDgI0iYjBwPy1/P/R/C65/Q+vvy31ZxDLtFhHXAttmr3e/pB9FxCSggRSOZ0g6rqNe38zMvqucp8ovAHwMfCRpCWCzDniNJ4EdIL1XRct7dq15HNg1e25/YAngFUm9IuKViLgQuBsYLGkp0okp1wLnAquWcBvMzKwNHbbH0oIXgInAZOA1UtCU2sXANZImZq81EWjxkB7wgKSvsuuPk95bu0zSi8BXwB4R8V9Ju0jaObvvTeAU4IekPa5vSXuKB3TAtpiZWSsUEZWeoWQkdQY6R8QX2WHKB4HeEfF1hUejoaEhGhsb217QzMz+R9Lz2Yl+Myjnnlc5zAc8nIWYgJ9XQ3CZmVlp1VR4RcSHwGqVnsPMzDqWuw3NzCx3HF5mZpY7NXXCRjWTNI10luWcWBR4t4Tj5IG3uT7U2zbX2/ZC+7d5uYj4TsuDwysHJDW2dLZNLfM214d62+Z6217ouG32YUMzM8sdh5eZmeWOwysfRlR6gArwNteHetvmette6KBt9nteZmaWO97zMjOz3HF4mZlZ7ji8qoikoZKmSHpF0rEtPD63pJuyx0dL6ln+KUuniO09QtJESeMlPSxpuUrMWUptbXPBcttJCkm5P626mG2WtEP2u54g6fpyz1hqRfy7vaykUZLGZP9+b16JOUtF0hWS3pH091Yel6SLsp/HeEnt/xqpiPClCi
7AXMA/gF5AV2AcMGCmZQ4C/pBd3wm4qdJzd/D2bgh0z64fmOftLXabs+XmBx4DngEaKj13GX7PvYExwMLZ7cUqPXcZtnkEcGB2fQAwtdJzt3Obf0T6XsO/t/L45sB9pML0tYDR7X1N73lVjzWAVyLi1Yj4L3AjsM1My2wDXJ1dvxXYWJLKOGMptbm9ETEqIj7Lbj4DLF3mGUutmN8xwOnAmcAX5RyugxSzzfsDl0TEBwAR8U6ZZyy1YrY5SF/QC7Ag6bsCcysiHgPen8Ui2wDXRPIMsFD2pcRzzOFVPZYC/l1w+/XsvhaXifRVL9OBRcoyXekVs72F9iX95ZZnbW5zdjhlmYi4p5yDdaBifs99gD6SnpT0jKShZZuuYxSzzacAu0l6HbgXOKQ8o1XM7P733qaa+koUq02SdgMagPUrPUtHktQJOA/Yq8KjlFtn0qHDDUh7149JGhTpK45q1c7AVRFxrqS1gWslDYyIbys9WF54z6t6vAEsU3B76ey+FpfJvnBzQeC9skxXesVsL5KGAMcDW0fEl2WaraO0tc3zAwOBRyVNJb03MDLnJ20U83t+HRgZEWEsgyUAAAFOSURBVF9FxD+Bl0hhllfFbPO+wM0AEfE0MA+pwLZWFfXf++xweFWP54DekpaX1JV0QsbImZYZCeyZXd8eeCSyd0NzqM3tlbQKcBkpuPL+Pgi0sc0RMT0iFo2InhHRk/Q+39YR0ViZcUuimH+v7yTtdSFpUdJhxFfLOWSJFbPN/wI2BpDUnxRe08o6ZXmNBPbIzjpcC5geEf9pzwp92LBKRMTXkg4GHiCdrXRFREyQdBrQGBEjgT+RDi+8QnpzdKfKTdw+RW7v2cB8wC3ZeSn/ioitKzZ0OxW5zTWlyG1+ANhU0kTgG+DoiMjrEYVit/lI4I+SDiedvLFXjv8QRdINpD9AFs3exzsZ6AIQEX8gva+3OfAK8Bmwd7tfM8c/LzMzq1M+bGhmZrnj8DIzs9xxeJmZWe44vMzMLHccXmZmljsOLzMzyx2Hl5mZ5c7/A9k0u27giaYYAAAAAElFTkSuQmCC\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# PLOT LOSS AND ACCURACY\n", + "%matplotlib inline\n", + "\n", + "import matplotlib.image as mpimg\n", + "import matplotlib.pyplot as plt\n", + "\n", + "#-----------------------------------------------------------\n", + "# Retrieve a list of list results on training and test data\n", + "# sets for each training epoch\n", + "#-----------------------------------------------------------\n", + "acc=history.history['acc']\n", + "val_acc=history.history['val_acc']\n", + "loss=history.history['loss']\n", + "val_loss=history.history['val_loss']\n", + "\n", + "epochs=range(len(acc)) # Get number of epochs\n", + "\n", + "#------------------------------------------------\n", + "# Plot training and validation accuracy per epoch\n", + "#------------------------------------------------\n", + "plt.plot(epochs, acc, 'r', \"Training Accuracy\")\n", + "plt.plot(epochs, val_acc, 'b', \"Validation Accuracy\")\n", + "plt.title('Training and validation accuracy')\n", + "plt.figure()\n", + "\n", + "#------------------------------------------------\n", + "# Plot training and validation loss per epoch\n", + "#------------------------------------------------\n", + "plt.plot(epochs, loss, 'r', \"Training Loss\")\n", + "plt.plot(epochs, val_loss, 'b', \"Validation Loss\")\n", + "\n", + "\n", + "plt.title('Training and validation loss')\n", + "\n", + "# Desired output. Charts with training and validation metrics. No crash :)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Submission Instructions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Now click the 'Submit Assignment' button above." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# When you're done or would like to take a break, please run the two cells below to save your work and close the Notebook. This will free up resources for your fellow learners. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%javascript\n", + "\n", + "IPython.notebook.save_checkpoint();" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%javascript\n", + "IPython.notebook.session.delete();\n", + "window.onbeforeunload = null\n", + "setTimeout(function() { window.close(); }, 1000);" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "name": "Exercise 6 - Question.ipynb", + "provenance": [] + }, + "coursera": { + "course_slug": "convolutional-neural-networks-tensorflow", + "graded_item_id": "uAPOR", + "launcher_item_id": "e9lTb" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/Week 3/Exercise_3_Horses_vs_humans_using_Transfer_Learning_Question-FINAL.ipynb b/Course 2: Convolutional Neural Networks in Tensorflow/Week 3/Exercise_3_Horses_vs_humans_using_Transfer_Learning_Question-FINAL.ipynb new file mode 100644 index 0000000..393ecef --- /dev/null +++ b/Course 2: Convolutional Neural Networks in Tensorflow/Week 3/Exercise_3_Horses_vs_humans_using_Transfer_Learning_Question-FINAL.ipynb @@ -0,0 +1,1650 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "lbFmQdsZs5eW" + }, + "outputs": [], + "source": [ + "# ATTENTION: Please do not alter any of the provided code in the exercise. Only add your own code where indicated\n", + "# ATTENTION: Please do not add or remove any cells in the exercise. 
The grader will check specific cells based on the cell position.\n", + "# ATTENTION: Please use the provided epoch values when training.\n", + "\n", + "# Import all the necessary files!\n", + "import os\n", + "import tensorflow as tf\n", + "from tensorflow.keras import layers\n", + "from tensorflow.keras import Model\n", + "from tensorflow.keras.callbacks import Callback\n", + "from os import getcwd" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "1xJZ5glPPCRz" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model: \"inception_v3\"\n", + "__________________________________________________________________________________________________\n", + "Layer (type) Output Shape Param # Connected to \n", + "==================================================================================================\n", + "input_1 (InputLayer) [(None, 150, 150, 3) 0 \n", + "__________________________________________________________________________________________________\n", + "conv2d (Conv2D) (None, 74, 74, 32) 864 input_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization (BatchNorma (None, 74, 74, 32) 96 conv2d[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation (Activation) (None, 74, 74, 32) 0 batch_normalization[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_1 (Conv2D) (None, 72, 72, 32) 9216 activation[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_1 (BatchNor (None, 72, 72, 32) 96 conv2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_1 (Activation) (None, 72, 72, 32) 0 batch_normalization_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_2 (Conv2D) (None, 72, 72, 64) 18432 activation_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_2 (BatchNor (None, 72, 72, 64) 192 conv2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_2 (Activation) (None, 72, 72, 64) 0 batch_normalization_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d (MaxPooling2D) (None, 35, 35, 64) 0 activation_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_3 (Conv2D) (None, 35, 35, 80) 5120 max_pooling2d[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_3 (BatchNor (None, 35, 35, 80) 240 conv2d_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_3 (Activation) (None, 35, 35, 80) 0 batch_normalization_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_4 (Conv2D) (None, 33, 33, 192) 138240 activation_3[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "batch_normalization_4 (BatchNor (None, 33, 33, 192) 576 conv2d_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_4 (Activation) (None, 33, 33, 192) 0 batch_normalization_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_1 (MaxPooling2D) (None, 16, 16, 192) 0 activation_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_8 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_8 (BatchNor (None, 16, 16, 64) 192 conv2d_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_8 (Activation) (None, 16, 16, 64) 0 batch_normalization_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_6 (Conv2D) (None, 16, 16, 48) 9216 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_9 (Conv2D) (None, 16, 16, 96) 55296 activation_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_6 (BatchNor (None, 16, 16, 48) 144 conv2d_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_9 (BatchNor (None, 16, 16, 96) 288 conv2d_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_6 (Activation) (None, 16, 16, 48) 0 batch_normalization_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_9 (Activation) (None, 16, 16, 96) 0 batch_normalization_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d (AveragePooli (None, 16, 16, 192) 0 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_5 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_7 (Conv2D) (None, 16, 16, 64) 76800 activation_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_10 (Conv2D) (None, 16, 16, 96) 82944 activation_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_11 (Conv2D) (None, 16, 16, 32) 6144 average_pooling2d[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_5 (BatchNor (None, 16, 16, 64) 192 conv2d_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_7 (BatchNor (None, 16, 16, 64) 192 conv2d_7[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "batch_normalization_10 (BatchNo (None, 16, 16, 96) 288 conv2d_10[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_11 (BatchNo (None, 16, 16, 32) 96 conv2d_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_5 (Activation) (None, 16, 16, 64) 0 batch_normalization_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_7 (Activation) (None, 16, 16, 64) 0 batch_normalization_7[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_10 (Activation) (None, 16, 16, 96) 0 batch_normalization_10[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_11 (Activation) (None, 16, 16, 32) 0 batch_normalization_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed0 (Concatenate) (None, 16, 16, 256) 0 activation_5[0][0] \n", + " activation_7[0][0] \n", + " activation_10[0][0] \n", + " activation_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_15 (Conv2D) (None, 16, 16, 64) 16384 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_15 (BatchNo (None, 16, 16, 64) 192 conv2d_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_15 (Activation) (None, 16, 16, 64) 0 batch_normalization_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_13 (Conv2D) (None, 16, 16, 48) 12288 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_16 (Conv2D) (None, 16, 16, 96) 55296 activation_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_13 (BatchNo (None, 16, 16, 48) 144 conv2d_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_16 (BatchNo (None, 16, 16, 96) 288 conv2d_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_13 (Activation) (None, 16, 16, 48) 0 batch_normalization_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_16 (Activation) (None, 16, 16, 96) 0 batch_normalization_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_1 (AveragePoo (None, 16, 16, 256) 0 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_12 (Conv2D) (None, 16, 16, 64) 16384 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_14 (Conv2D) (None, 16, 16, 64) 76800 
activation_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_17 (Conv2D) (None, 16, 16, 96) 82944 activation_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_18 (Conv2D) (None, 16, 16, 64) 16384 average_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_12 (BatchNo (None, 16, 16, 64) 192 conv2d_12[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_14 (BatchNo (None, 16, 16, 64) 192 conv2d_14[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_17 (BatchNo (None, 16, 16, 96) 288 conv2d_17[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_18 (BatchNo (None, 16, 16, 64) 192 conv2d_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_12 (Activation) (None, 16, 16, 64) 0 batch_normalization_12[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_14 (Activation) (None, 16, 16, 64) 0 batch_normalization_14[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_17 (Activation) (None, 16, 16, 96) 0 batch_normalization_17[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_18 (Activation) (None, 16, 16, 64) 0 batch_normalization_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed1 (Concatenate) (None, 16, 16, 288) 0 activation_12[0][0] \n", + " activation_14[0][0] \n", + " activation_17[0][0] \n", + " activation_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_22 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_22 (BatchNo (None, 16, 16, 64) 192 conv2d_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_22 (Activation) (None, 16, 16, 64) 0 batch_normalization_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_20 (Conv2D) (None, 16, 16, 48) 13824 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_23 (Conv2D) (None, 16, 16, 96) 55296 activation_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_20 (BatchNo (None, 16, 16, 48) 144 conv2d_20[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_23 (BatchNo (None, 16, 16, 96) 288 conv2d_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_20 
(Activation) (None, 16, 16, 48) 0 batch_normalization_20[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_23 (Activation) (None, 16, 16, 96) 0 batch_normalization_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_2 (AveragePoo (None, 16, 16, 288) 0 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_19 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_21 (Conv2D) (None, 16, 16, 64) 76800 activation_20[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_24 (Conv2D) (None, 16, 16, 96) 82944 activation_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_25 (Conv2D) (None, 16, 16, 64) 18432 average_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_19 (BatchNo (None, 16, 16, 64) 192 conv2d_19[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_21 (BatchNo (None, 16, 16, 64) 192 conv2d_21[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_24 (BatchNo (None, 16, 16, 96) 288 conv2d_24[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_25 (BatchNo (None, 16, 16, 64) 192 conv2d_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_19 (Activation) (None, 16, 16, 64) 0 batch_normalization_19[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_21 (Activation) (None, 16, 16, 64) 0 batch_normalization_21[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_24 (Activation) (None, 16, 16, 96) 0 batch_normalization_24[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_25 (Activation) (None, 16, 16, 64) 0 batch_normalization_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed2 (Concatenate) (None, 16, 16, 288) 0 activation_19[0][0] \n", + " activation_21[0][0] \n", + " activation_24[0][0] \n", + " activation_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_27 (Conv2D) (None, 16, 16, 64) 18432 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_27 (BatchNo (None, 16, 16, 64) 192 conv2d_27[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_27 (Activation) (None, 16, 16, 64) 0 batch_normalization_27[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "conv2d_28 (Conv2D) (None, 16, 16, 96) 55296 activation_27[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_28 (BatchNo (None, 16, 16, 96) 288 conv2d_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_28 (Activation) (None, 16, 16, 96) 0 batch_normalization_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_26 (Conv2D) (None, 7, 7, 384) 995328 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_29 (Conv2D) (None, 7, 7, 96) 82944 activation_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_26 (BatchNo (None, 7, 7, 384) 1152 conv2d_26[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_29 (BatchNo (None, 7, 7, 96) 288 conv2d_29[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_26 (Activation) (None, 7, 7, 384) 0 batch_normalization_26[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_29 (Activation) (None, 7, 7, 96) 0 batch_normalization_29[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_2 (MaxPooling2D) (None, 7, 7, 288) 0 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed3 (Concatenate) (None, 7, 7, 768) 0 activation_26[0][0] \n", + " activation_29[0][0] \n", + " max_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_34 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_34 (BatchNo (None, 7, 7, 128) 384 conv2d_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_34 (Activation) (None, 7, 7, 128) 0 batch_normalization_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_35 (Conv2D) (None, 7, 7, 128) 114688 activation_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_35 (BatchNo (None, 7, 7, 128) 384 conv2d_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_35 (Activation) (None, 7, 7, 128) 0 batch_normalization_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_31 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_36 (Conv2D) (None, 7, 7, 128) 114688 activation_35[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "batch_normalization_31 (BatchNo (None, 7, 7, 128) 384 conv2d_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_36 (BatchNo (None, 7, 7, 128) 384 conv2d_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_31 (Activation) (None, 7, 7, 128) 0 batch_normalization_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_36 (Activation) (None, 7, 7, 128) 0 batch_normalization_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_32 (Conv2D) (None, 7, 7, 128) 114688 activation_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_37 (Conv2D) (None, 7, 7, 128) 114688 activation_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_32 (BatchNo (None, 7, 7, 128) 384 conv2d_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_37 (BatchNo (None, 7, 7, 128) 384 conv2d_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_32 (Activation) (None, 7, 7, 128) 0 batch_normalization_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_37 (Activation) (None, 7, 7, 128) 0 batch_normalization_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_3 (AveragePoo (None, 7, 7, 768) 0 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_30 (Conv2D) (None, 7, 7, 192) 147456 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_33 (Conv2D) (None, 7, 7, 192) 172032 activation_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_38 (Conv2D) (None, 7, 7, 192) 172032 activation_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_39 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_30 (BatchNo (None, 7, 7, 192) 576 conv2d_30[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_33 (BatchNo (None, 7, 7, 192) 576 conv2d_33[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_38 (BatchNo (None, 7, 7, 192) 576 conv2d_38[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_39 (BatchNo (None, 7, 7, 192) 576 conv2d_39[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "activation_30 (Activation) (None, 7, 7, 192) 0 batch_normalization_30[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_33 (Activation) (None, 7, 7, 192) 0 batch_normalization_33[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_38 (Activation) (None, 7, 7, 192) 0 batch_normalization_38[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_39 (Activation) (None, 7, 7, 192) 0 batch_normalization_39[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed4 (Concatenate) (None, 7, 7, 768) 0 activation_30[0][0] \n", + " activation_33[0][0] \n", + " activation_38[0][0] \n", + " activation_39[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_44 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_44 (BatchNo (None, 7, 7, 160) 480 conv2d_44[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_44 (Activation) (None, 7, 7, 160) 0 batch_normalization_44[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_45 (Conv2D) (None, 7, 7, 160) 179200 activation_44[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_45 (BatchNo (None, 7, 7, 160) 480 conv2d_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_45 (Activation) (None, 7, 7, 160) 0 batch_normalization_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_41 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_46 (Conv2D) (None, 7, 7, 160) 179200 activation_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_41 (BatchNo (None, 7, 7, 160) 480 conv2d_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_46 (BatchNo (None, 7, 7, 160) 480 conv2d_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_41 (Activation) (None, 7, 7, 160) 0 batch_normalization_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_46 (Activation) (None, 7, 7, 160) 0 batch_normalization_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_42 (Conv2D) (None, 7, 7, 160) 179200 activation_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_47 (Conv2D) (None, 7, 7, 160) 179200 
activation_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_42 (BatchNo (None, 7, 7, 160) 480 conv2d_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_47 (BatchNo (None, 7, 7, 160) 480 conv2d_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_42 (Activation) (None, 7, 7, 160) 0 batch_normalization_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_47 (Activation) (None, 7, 7, 160) 0 batch_normalization_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_4 (AveragePoo (None, 7, 7, 768) 0 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_40 (Conv2D) (None, 7, 7, 192) 147456 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_43 (Conv2D) (None, 7, 7, 192) 215040 activation_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_48 (Conv2D) (None, 7, 7, 192) 215040 activation_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_49 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_40 (BatchNo (None, 7, 7, 192) 576 conv2d_40[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_43 (BatchNo (None, 7, 7, 192) 576 conv2d_43[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_48 (BatchNo (None, 7, 7, 192) 576 conv2d_48[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_49 (BatchNo (None, 7, 7, 192) 576 conv2d_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_40 (Activation) (None, 7, 7, 192) 0 batch_normalization_40[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_43 (Activation) (None, 7, 7, 192) 0 batch_normalization_43[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_48 (Activation) (None, 7, 7, 192) 0 batch_normalization_48[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_49 (Activation) (None, 7, 7, 192) 0 batch_normalization_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed5 (Concatenate) (None, 7, 7, 768) 0 activation_40[0][0] \n", + " activation_43[0][0] \n", + " activation_48[0][0] \n", + " activation_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_54 (Conv2D) 
(None, 7, 7, 160) 122880 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_54 (BatchNo (None, 7, 7, 160) 480 conv2d_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_54 (Activation) (None, 7, 7, 160) 0 batch_normalization_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_55 (Conv2D) (None, 7, 7, 160) 179200 activation_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_55 (BatchNo (None, 7, 7, 160) 480 conv2d_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_55 (Activation) (None, 7, 7, 160) 0 batch_normalization_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_51 (Conv2D) (None, 7, 7, 160) 122880 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_56 (Conv2D) (None, 7, 7, 160) 179200 activation_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_51 (BatchNo (None, 7, 7, 160) 480 conv2d_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_56 (BatchNo (None, 7, 7, 160) 480 conv2d_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_51 (Activation) (None, 7, 7, 160) 0 batch_normalization_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_56 (Activation) (None, 7, 7, 160) 0 batch_normalization_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_52 (Conv2D) (None, 7, 7, 160) 179200 activation_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_57 (Conv2D) (None, 7, 7, 160) 179200 activation_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_52 (BatchNo (None, 7, 7, 160) 480 conv2d_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_57 (BatchNo (None, 7, 7, 160) 480 conv2d_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_52 (Activation) (None, 7, 7, 160) 0 batch_normalization_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_57 (Activation) (None, 7, 7, 160) 0 batch_normalization_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_5 (AveragePoo (None, 7, 7, 768) 0 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_50 (Conv2D) (None, 7, 7, 192) 147456 mixed5[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "conv2d_53 (Conv2D) (None, 7, 7, 192) 215040 activation_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_58 (Conv2D) (None, 7, 7, 192) 215040 activation_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_59 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_50 (BatchNo (None, 7, 7, 192) 576 conv2d_50[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_53 (BatchNo (None, 7, 7, 192) 576 conv2d_53[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_58 (BatchNo (None, 7, 7, 192) 576 conv2d_58[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_59 (BatchNo (None, 7, 7, 192) 576 conv2d_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_50 (Activation) (None, 7, 7, 192) 0 batch_normalization_50[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_53 (Activation) (None, 7, 7, 192) 0 batch_normalization_53[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_58 (Activation) (None, 7, 7, 192) 0 batch_normalization_58[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_59 (Activation) (None, 7, 7, 192) 0 batch_normalization_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed6 (Concatenate) (None, 7, 7, 768) 0 activation_50[0][0] \n", + " activation_53[0][0] \n", + " activation_58[0][0] \n", + " activation_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_64 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_64 (BatchNo (None, 7, 7, 192) 576 conv2d_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_64 (Activation) (None, 7, 7, 192) 0 batch_normalization_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_65 (Conv2D) (None, 7, 7, 192) 258048 activation_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_65 (BatchNo (None, 7, 7, 192) 576 conv2d_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_65 (Activation) (None, 7, 7, 192) 0 batch_normalization_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_61 (Conv2D) (None, 7, 7, 192) 147456 
mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_66 (Conv2D) (None, 7, 7, 192) 258048 activation_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_61 (BatchNo (None, 7, 7, 192) 576 conv2d_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_66 (BatchNo (None, 7, 7, 192) 576 conv2d_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_61 (Activation) (None, 7, 7, 192) 0 batch_normalization_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_66 (Activation) (None, 7, 7, 192) 0 batch_normalization_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_62 (Conv2D) (None, 7, 7, 192) 258048 activation_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_67 (Conv2D) (None, 7, 7, 192) 258048 activation_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_62 (BatchNo (None, 7, 7, 192) 576 conv2d_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_67 (BatchNo (None, 7, 7, 192) 576 conv2d_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_62 (Activation) (None, 7, 7, 192) 0 batch_normalization_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_67 (Activation) (None, 7, 7, 192) 0 batch_normalization_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_6 (AveragePoo (None, 7, 7, 768) 0 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_60 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_63 (Conv2D) (None, 7, 7, 192) 258048 activation_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_68 (Conv2D) (None, 7, 7, 192) 258048 activation_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_69 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_60 (BatchNo (None, 7, 7, 192) 576 conv2d_60[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_63 (BatchNo (None, 7, 7, 192) 576 conv2d_63[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_68 (BatchNo (None, 7, 7, 192) 576 conv2d_68[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "batch_normalization_69 (BatchNo (None, 7, 7, 192) 576 conv2d_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_60 (Activation) (None, 7, 7, 192) 0 batch_normalization_60[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_63 (Activation) (None, 7, 7, 192) 0 batch_normalization_63[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_68 (Activation) (None, 7, 7, 192) 0 batch_normalization_68[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_69 (Activation) (None, 7, 7, 192) 0 batch_normalization_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed7 (Concatenate) (None, 7, 7, 768) 0 activation_60[0][0] \n", + " activation_63[0][0] \n", + " activation_68[0][0] \n", + " activation_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_72 (Conv2D) (None, 7, 7, 192) 147456 mixed7[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_72 (BatchNo (None, 7, 7, 192) 576 conv2d_72[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_72 (Activation) (None, 7, 7, 192) 0 batch_normalization_72[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_73 (Conv2D) (None, 7, 7, 192) 258048 activation_72[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_73 (BatchNo (None, 7, 7, 192) 576 conv2d_73[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_73 (Activation) (None, 7, 7, 192) 0 batch_normalization_73[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_70 (Conv2D) (None, 7, 7, 192) 147456 mixed7[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_74 (Conv2D) (None, 7, 7, 192) 258048 activation_73[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_70 (BatchNo (None, 7, 7, 192) 576 conv2d_70[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_74 (BatchNo (None, 7, 7, 192) 576 conv2d_74[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_70 (Activation) (None, 7, 7, 192) 0 batch_normalization_70[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_74 (Activation) (None, 7, 7, 192) 0 batch_normalization_74[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_71 (Conv2D) (None, 3, 3, 320) 552960 
activation_70[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_75 (Conv2D) (None, 3, 3, 192) 331776 activation_74[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_71 (BatchNo (None, 3, 3, 320) 960 conv2d_71[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_75 (BatchNo (None, 3, 3, 192) 576 conv2d_75[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_71 (Activation) (None, 3, 3, 320) 0 batch_normalization_71[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_75 (Activation) (None, 3, 3, 192) 0 batch_normalization_75[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_3 (MaxPooling2D) (None, 3, 3, 768) 0 mixed7[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed8 (Concatenate) (None, 3, 3, 1280) 0 activation_71[0][0] \n", + " activation_75[0][0] \n", + " max_pooling2d_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_80 (Conv2D) (None, 3, 3, 448) 573440 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_80 (BatchNo (None, 3, 3, 448) 1344 conv2d_80[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_80 (Activation) (None, 3, 3, 448) 0 batch_normalization_80[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_77 (Conv2D) (None, 3, 3, 384) 491520 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_81 (Conv2D) (None, 3, 3, 384) 1548288 activation_80[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_77 (BatchNo (None, 3, 3, 384) 1152 conv2d_77[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_81 (BatchNo (None, 3, 3, 384) 1152 conv2d_81[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_77 (Activation) (None, 3, 3, 384) 0 batch_normalization_77[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_81 (Activation) (None, 3, 3, 384) 0 batch_normalization_81[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_78 (Conv2D) (None, 3, 3, 384) 442368 activation_77[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_79 (Conv2D) (None, 3, 3, 384) 442368 activation_77[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_82 (Conv2D) (None, 3, 3, 384) 442368 activation_81[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "conv2d_83 (Conv2D) (None, 3, 3, 384) 442368 activation_81[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_7 (AveragePoo (None, 3, 3, 1280) 0 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_76 (Conv2D) (None, 3, 3, 320) 409600 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_78 (BatchNo (None, 3, 3, 384) 1152 conv2d_78[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_79 (BatchNo (None, 3, 3, 384) 1152 conv2d_79[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_82 (BatchNo (None, 3, 3, 384) 1152 conv2d_82[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_83 (BatchNo (None, 3, 3, 384) 1152 conv2d_83[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_84 (Conv2D) (None, 3, 3, 192) 245760 average_pooling2d_7[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_76 (BatchNo (None, 3, 3, 320) 960 conv2d_76[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_78 (Activation) (None, 3, 3, 384) 0 batch_normalization_78[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_79 (Activation) (None, 3, 3, 384) 0 batch_normalization_79[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_82 (Activation) (None, 3, 3, 384) 0 batch_normalization_82[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_83 (Activation) (None, 3, 3, 384) 0 batch_normalization_83[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_84 (BatchNo (None, 3, 3, 192) 576 conv2d_84[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_76 (Activation) (None, 3, 3, 320) 0 batch_normalization_76[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed9_0 (Concatenate) (None, 3, 3, 768) 0 activation_78[0][0] \n", + " activation_79[0][0] \n", + "__________________________________________________________________________________________________\n", + "concatenate (Concatenate) (None, 3, 3, 768) 0 activation_82[0][0] \n", + " activation_83[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_84 (Activation) (None, 3, 3, 192) 0 batch_normalization_84[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed9 (Concatenate) (None, 3, 3, 2048) 0 activation_76[0][0] \n", + " 
mixed9_0[0][0] \n", + " concatenate[0][0] \n", + " activation_84[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_89 (Conv2D) (None, 3, 3, 448) 917504 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_89 (BatchNo (None, 3, 3, 448) 1344 conv2d_89[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_89 (Activation) (None, 3, 3, 448) 0 batch_normalization_89[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_86 (Conv2D) (None, 3, 3, 384) 786432 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_90 (Conv2D) (None, 3, 3, 384) 1548288 activation_89[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_86 (BatchNo (None, 3, 3, 384) 1152 conv2d_86[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_90 (BatchNo (None, 3, 3, 384) 1152 conv2d_90[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_86 (Activation) (None, 3, 3, 384) 0 batch_normalization_86[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_90 (Activation) (None, 3, 3, 384) 0 batch_normalization_90[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_87 (Conv2D) (None, 3, 3, 384) 442368 activation_86[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_88 (Conv2D) (None, 3, 3, 384) 442368 activation_86[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_91 (Conv2D) (None, 3, 3, 384) 442368 activation_90[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_92 (Conv2D) (None, 3, 3, 384) 442368 activation_90[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_8 (AveragePoo (None, 3, 3, 2048) 0 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_85 (Conv2D) (None, 3, 3, 320) 655360 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_87 (BatchNo (None, 3, 3, 384) 1152 conv2d_87[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_88 (BatchNo (None, 3, 3, 384) 1152 conv2d_88[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_91 (BatchNo (None, 3, 3, 384) 1152 conv2d_91[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_92 (BatchNo (None, 3, 3, 384) 1152 conv2d_92[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "conv2d_93 (Conv2D) (None, 3, 3, 192) 393216 average_pooling2d_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_85 (BatchNo (None, 3, 3, 320) 960 conv2d_85[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_87 (Activation) (None, 3, 3, 384) 0 batch_normalization_87[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_88 (Activation) (None, 3, 3, 384) 0 batch_normalization_88[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_91 (Activation) (None, 3, 3, 384) 0 batch_normalization_91[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_92 (Activation) (None, 3, 3, 384) 0 batch_normalization_92[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_93 (BatchNo (None, 3, 3, 192) 576 conv2d_93[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_85 (Activation) (None, 3, 3, 320) 0 batch_normalization_85[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed9_1 (Concatenate) (None, 3, 3, 768) 0 activation_87[0][0] \n", + " activation_88[0][0] \n", + "__________________________________________________________________________________________________\n", + "concatenate_1 (Concatenate) (None, 3, 3, 768) 0 activation_91[0][0] \n", + " activation_92[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_93 (Activation) (None, 3, 3, 192) 0 batch_normalization_93[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed10 (Concatenate) (None, 3, 3, 2048) 0 activation_85[0][0] \n", + " mixed9_1[0][0] \n", + " concatenate_1[0][0] \n", + " activation_93[0][0] \n", + "==================================================================================================\n", + "Total params: 21,802,784\n", + "Trainable params: 0\n", + "Non-trainable params: 21,802,784\n", + "__________________________________________________________________________________________________\n" + ] + } + ], + "source": [ + "path_inception = f\"{getcwd()}/../tmp2/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5\"\n", + "\n", + "# Import the inception model \n", + "from tensorflow.keras.applications.inception_v3 import InceptionV3\n", + "\n", + "# Create an instance of the inception model from the local pre-trained weights\n", + "local_weights_file = path_inception\n", + "\n", + "pre_trained_model = InceptionV3(input_shape = (150,150,3),\n", + " include_top = False,\n", + " weights = None)# Your Code Here\n", + "\n", + "pre_trained_model.load_weights(local_weights_file)\n", + "\n", + "# Make all the layers in the pre-trained model non-trainable\n", + "for layer in pre_trained_model.layers:\n", + " layer.trainable = False\n", + " # Your Code Here\n", + " \n", + "# Print the model summary\n", + "pre_trained_model.summary()\n", + "\n", + "# Expected 
Output is extremely large, but should end with:\n", + "\n", + "#batch_normalization_v1_281 (Bat (None, 3, 3, 192) 576 conv2d_281[0][0] \n", + "#__________________________________________________________________________________________________\n", + "#activation_273 (Activation) (None, 3, 3, 320) 0 batch_normalization_v1_273[0][0] \n", + "#__________________________________________________________________________________________________\n", + "#mixed9_1 (Concatenate) (None, 3, 3, 768) 0 activation_275[0][0] \n", + "# activation_276[0][0] \n", + "#__________________________________________________________________________________________________\n", + "#concatenate_5 (Concatenate) (None, 3, 3, 768) 0 activation_279[0][0] \n", + "# activation_280[0][0] \n", + "#__________________________________________________________________________________________________\n", + "#activation_281 (Activation) (None, 3, 3, 192) 0 batch_normalization_v1_281[0][0] \n", + "#__________________________________________________________________________________________________\n", + "#mixed10 (Concatenate) (None, 3, 3, 2048) 0 activation_273[0][0] \n", + "# mixed9_1[0][0] \n", + "# concatenate_5[0][0] \n", + "# activation_281[0][0] \n", + "#==================================================================================================\n", + "#Total params: 21,802,784\n", + "#Trainable params: 0\n", + "#Non-trainable params: 21,802,784" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "CFsUlwdfs_wg" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "last layer output shape: (None, 7, 7, 768)\n" + ] + } + ], + "source": [ + "last_layer = pre_trained_model.get_layer('mixed7')# Your Code Here)\n", + "print('last layer output shape: ', last_layer.output_shape)\n", + "last_output = last_layer.output# Your Code Here\n", + "\n", + "# Expected Output:\n", + "# ('last layer output shape: ', (None, 7, 7, 768))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "-bsWZWp5oMq9" + }, + "outputs": [], + "source": [ + "# Define a Callback class that stops training once accuracy reaches 97.0%\n", + "class myCallback(tf.keras.callbacks.Callback):\n", + " def on_epoch_end(self, epoch, logs={}):\n", + " if(logs.get('acc')>0.97):\n", + " print(\"\\nReached 97.0% accuracy so cancelling training!\")\n", + " self.model.stop_training = True " + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "BMXb913pbvFg" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model: \"model\"\n", + "__________________________________________________________________________________________________\n", + "Layer (type) Output Shape Param # Connected to \n", + "==================================================================================================\n", + "input_1 (InputLayer) [(None, 150, 150, 3) 0 \n", + "__________________________________________________________________________________________________\n", + "conv2d (Conv2D) (None, 74, 74, 32) 864 input_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization (BatchNorma (None, 74, 74, 32) 96 conv2d[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation 
(Activation) (None, 74, 74, 32) 0 batch_normalization[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_1 (Conv2D) (None, 72, 72, 32) 9216 activation[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_1 (BatchNor (None, 72, 72, 32) 96 conv2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_1 (Activation) (None, 72, 72, 32) 0 batch_normalization_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_2 (Conv2D) (None, 72, 72, 64) 18432 activation_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_2 (BatchNor (None, 72, 72, 64) 192 conv2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_2 (Activation) (None, 72, 72, 64) 0 batch_normalization_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d (MaxPooling2D) (None, 35, 35, 64) 0 activation_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_3 (Conv2D) (None, 35, 35, 80) 5120 max_pooling2d[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_3 (BatchNor (None, 35, 35, 80) 240 conv2d_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_3 (Activation) (None, 35, 35, 80) 0 batch_normalization_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_4 (Conv2D) (None, 33, 33, 192) 138240 activation_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_4 (BatchNor (None, 33, 33, 192) 576 conv2d_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_4 (Activation) (None, 33, 33, 192) 0 batch_normalization_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_1 (MaxPooling2D) (None, 16, 16, 192) 0 activation_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_8 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_8 (BatchNor (None, 16, 16, 64) 192 conv2d_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_8 (Activation) (None, 16, 16, 64) 0 batch_normalization_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_6 (Conv2D) (None, 16, 16, 48) 9216 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_9 (Conv2D) (None, 16, 16, 96) 55296 activation_8[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "batch_normalization_6 (BatchNor (None, 16, 16, 48) 144 conv2d_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_9 (BatchNor (None, 16, 16, 96) 288 conv2d_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_6 (Activation) (None, 16, 16, 48) 0 batch_normalization_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_9 (Activation) (None, 16, 16, 96) 0 batch_normalization_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d (AveragePooli (None, 16, 16, 192) 0 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_5 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_7 (Conv2D) (None, 16, 16, 64) 76800 activation_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_10 (Conv2D) (None, 16, 16, 96) 82944 activation_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_11 (Conv2D) (None, 16, 16, 32) 6144 average_pooling2d[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_5 (BatchNor (None, 16, 16, 64) 192 conv2d_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_7 (BatchNor (None, 16, 16, 64) 192 conv2d_7[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_10 (BatchNo (None, 16, 16, 96) 288 conv2d_10[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_11 (BatchNo (None, 16, 16, 32) 96 conv2d_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_5 (Activation) (None, 16, 16, 64) 0 batch_normalization_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_7 (Activation) (None, 16, 16, 64) 0 batch_normalization_7[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_10 (Activation) (None, 16, 16, 96) 0 batch_normalization_10[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_11 (Activation) (None, 16, 16, 32) 0 batch_normalization_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed0 (Concatenate) (None, 16, 16, 256) 0 activation_5[0][0] \n", + " activation_7[0][0] \n", + " activation_10[0][0] \n", + " activation_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_15 (Conv2D) (None, 16, 16, 64) 
16384 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_15 (BatchNo (None, 16, 16, 64) 192 conv2d_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_15 (Activation) (None, 16, 16, 64) 0 batch_normalization_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_13 (Conv2D) (None, 16, 16, 48) 12288 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_16 (Conv2D) (None, 16, 16, 96) 55296 activation_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_13 (BatchNo (None, 16, 16, 48) 144 conv2d_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_16 (BatchNo (None, 16, 16, 96) 288 conv2d_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_13 (Activation) (None, 16, 16, 48) 0 batch_normalization_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_16 (Activation) (None, 16, 16, 96) 0 batch_normalization_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_1 (AveragePoo (None, 16, 16, 256) 0 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_12 (Conv2D) (None, 16, 16, 64) 16384 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_14 (Conv2D) (None, 16, 16, 64) 76800 activation_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_17 (Conv2D) (None, 16, 16, 96) 82944 activation_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_18 (Conv2D) (None, 16, 16, 64) 16384 average_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_12 (BatchNo (None, 16, 16, 64) 192 conv2d_12[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_14 (BatchNo (None, 16, 16, 64) 192 conv2d_14[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_17 (BatchNo (None, 16, 16, 96) 288 conv2d_17[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_18 (BatchNo (None, 16, 16, 64) 192 conv2d_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_12 (Activation) (None, 16, 16, 64) 0 batch_normalization_12[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_14 (Activation) (None, 16, 16, 64) 0 batch_normalization_14[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "activation_17 (Activation) (None, 16, 16, 96) 0 batch_normalization_17[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_18 (Activation) (None, 16, 16, 64) 0 batch_normalization_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed1 (Concatenate) (None, 16, 16, 288) 0 activation_12[0][0] \n", + " activation_14[0][0] \n", + " activation_17[0][0] \n", + " activation_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_22 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_22 (BatchNo (None, 16, 16, 64) 192 conv2d_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_22 (Activation) (None, 16, 16, 64) 0 batch_normalization_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_20 (Conv2D) (None, 16, 16, 48) 13824 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_23 (Conv2D) (None, 16, 16, 96) 55296 activation_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_20 (BatchNo (None, 16, 16, 48) 144 conv2d_20[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_23 (BatchNo (None, 16, 16, 96) 288 conv2d_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_20 (Activation) (None, 16, 16, 48) 0 batch_normalization_20[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_23 (Activation) (None, 16, 16, 96) 0 batch_normalization_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_2 (AveragePoo (None, 16, 16, 288) 0 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_19 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_21 (Conv2D) (None, 16, 16, 64) 76800 activation_20[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_24 (Conv2D) (None, 16, 16, 96) 82944 activation_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_25 (Conv2D) (None, 16, 16, 64) 18432 average_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_19 (BatchNo (None, 16, 16, 64) 192 conv2d_19[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_21 (BatchNo (None, 16, 16, 64) 192 conv2d_21[0][0] \n", 
+ "__________________________________________________________________________________________________\n", + "batch_normalization_24 (BatchNo (None, 16, 16, 96) 288 conv2d_24[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_25 (BatchNo (None, 16, 16, 64) 192 conv2d_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_19 (Activation) (None, 16, 16, 64) 0 batch_normalization_19[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_21 (Activation) (None, 16, 16, 64) 0 batch_normalization_21[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_24 (Activation) (None, 16, 16, 96) 0 batch_normalization_24[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_25 (Activation) (None, 16, 16, 64) 0 batch_normalization_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed2 (Concatenate) (None, 16, 16, 288) 0 activation_19[0][0] \n", + " activation_21[0][0] \n", + " activation_24[0][0] \n", + " activation_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_27 (Conv2D) (None, 16, 16, 64) 18432 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_27 (BatchNo (None, 16, 16, 64) 192 conv2d_27[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_27 (Activation) (None, 16, 16, 64) 0 batch_normalization_27[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_28 (Conv2D) (None, 16, 16, 96) 55296 activation_27[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_28 (BatchNo (None, 16, 16, 96) 288 conv2d_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_28 (Activation) (None, 16, 16, 96) 0 batch_normalization_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_26 (Conv2D) (None, 7, 7, 384) 995328 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_29 (Conv2D) (None, 7, 7, 96) 82944 activation_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_26 (BatchNo (None, 7, 7, 384) 1152 conv2d_26[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_29 (BatchNo (None, 7, 7, 96) 288 conv2d_29[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_26 (Activation) (None, 7, 7, 384) 0 batch_normalization_26[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_29 (Activation) (None, 7, 7, 
96) 0 batch_normalization_29[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_2 (MaxPooling2D) (None, 7, 7, 288) 0 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed3 (Concatenate) (None, 7, 7, 768) 0 activation_26[0][0] \n", + " activation_29[0][0] \n", + " max_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_34 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_34 (BatchNo (None, 7, 7, 128) 384 conv2d_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_34 (Activation) (None, 7, 7, 128) 0 batch_normalization_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_35 (Conv2D) (None, 7, 7, 128) 114688 activation_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_35 (BatchNo (None, 7, 7, 128) 384 conv2d_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_35 (Activation) (None, 7, 7, 128) 0 batch_normalization_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_31 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_36 (Conv2D) (None, 7, 7, 128) 114688 activation_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_31 (BatchNo (None, 7, 7, 128) 384 conv2d_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_36 (BatchNo (None, 7, 7, 128) 384 conv2d_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_31 (Activation) (None, 7, 7, 128) 0 batch_normalization_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_36 (Activation) (None, 7, 7, 128) 0 batch_normalization_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_32 (Conv2D) (None, 7, 7, 128) 114688 activation_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_37 (Conv2D) (None, 7, 7, 128) 114688 activation_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_32 (BatchNo (None, 7, 7, 128) 384 conv2d_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_37 (BatchNo (None, 7, 7, 128) 384 conv2d_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_32 (Activation) (None, 7, 7, 128) 0 batch_normalization_32[0][0] 
\n", + "__________________________________________________________________________________________________\n", + "activation_37 (Activation) (None, 7, 7, 128) 0 batch_normalization_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_3 (AveragePoo (None, 7, 7, 768) 0 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_30 (Conv2D) (None, 7, 7, 192) 147456 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_33 (Conv2D) (None, 7, 7, 192) 172032 activation_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_38 (Conv2D) (None, 7, 7, 192) 172032 activation_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_39 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_30 (BatchNo (None, 7, 7, 192) 576 conv2d_30[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_33 (BatchNo (None, 7, 7, 192) 576 conv2d_33[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_38 (BatchNo (None, 7, 7, 192) 576 conv2d_38[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_39 (BatchNo (None, 7, 7, 192) 576 conv2d_39[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_30 (Activation) (None, 7, 7, 192) 0 batch_normalization_30[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_33 (Activation) (None, 7, 7, 192) 0 batch_normalization_33[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_38 (Activation) (None, 7, 7, 192) 0 batch_normalization_38[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_39 (Activation) (None, 7, 7, 192) 0 batch_normalization_39[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed4 (Concatenate) (None, 7, 7, 768) 0 activation_30[0][0] \n", + " activation_33[0][0] \n", + " activation_38[0][0] \n", + " activation_39[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_44 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_44 (BatchNo (None, 7, 7, 160) 480 conv2d_44[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_44 (Activation) (None, 7, 7, 160) 0 batch_normalization_44[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_45 (Conv2D) (None, 7, 7, 160) 179200 
activation_44[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_45 (BatchNo (None, 7, 7, 160) 480 conv2d_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_45 (Activation) (None, 7, 7, 160) 0 batch_normalization_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_41 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_46 (Conv2D) (None, 7, 7, 160) 179200 activation_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_41 (BatchNo (None, 7, 7, 160) 480 conv2d_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_46 (BatchNo (None, 7, 7, 160) 480 conv2d_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_41 (Activation) (None, 7, 7, 160) 0 batch_normalization_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_46 (Activation) (None, 7, 7, 160) 0 batch_normalization_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_42 (Conv2D) (None, 7, 7, 160) 179200 activation_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_47 (Conv2D) (None, 7, 7, 160) 179200 activation_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_42 (BatchNo (None, 7, 7, 160) 480 conv2d_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_47 (BatchNo (None, 7, 7, 160) 480 conv2d_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_42 (Activation) (None, 7, 7, 160) 0 batch_normalization_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_47 (Activation) (None, 7, 7, 160) 0 batch_normalization_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_4 (AveragePoo (None, 7, 7, 768) 0 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_40 (Conv2D) (None, 7, 7, 192) 147456 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_43 (Conv2D) (None, 7, 7, 192) 215040 activation_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_48 (Conv2D) (None, 7, 7, 192) 215040 activation_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_49 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_4[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "batch_normalization_40 (BatchNo (None, 7, 7, 192) 576 conv2d_40[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_43 (BatchNo (None, 7, 7, 192) 576 conv2d_43[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_48 (BatchNo (None, 7, 7, 192) 576 conv2d_48[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_49 (BatchNo (None, 7, 7, 192) 576 conv2d_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_40 (Activation) (None, 7, 7, 192) 0 batch_normalization_40[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_43 (Activation) (None, 7, 7, 192) 0 batch_normalization_43[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_48 (Activation) (None, 7, 7, 192) 0 batch_normalization_48[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_49 (Activation) (None, 7, 7, 192) 0 batch_normalization_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed5 (Concatenate) (None, 7, 7, 768) 0 activation_40[0][0] \n", + " activation_43[0][0] \n", + " activation_48[0][0] \n", + " activation_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_54 (Conv2D) (None, 7, 7, 160) 122880 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_54 (BatchNo (None, 7, 7, 160) 480 conv2d_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_54 (Activation) (None, 7, 7, 160) 0 batch_normalization_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_55 (Conv2D) (None, 7, 7, 160) 179200 activation_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_55 (BatchNo (None, 7, 7, 160) 480 conv2d_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_55 (Activation) (None, 7, 7, 160) 0 batch_normalization_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_51 (Conv2D) (None, 7, 7, 160) 122880 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_56 (Conv2D) (None, 7, 7, 160) 179200 activation_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_51 (BatchNo (None, 7, 7, 160) 480 conv2d_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_56 (BatchNo (None, 7, 7, 160) 480 
conv2d_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_51 (Activation) (None, 7, 7, 160) 0 batch_normalization_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_56 (Activation) (None, 7, 7, 160) 0 batch_normalization_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_52 (Conv2D) (None, 7, 7, 160) 179200 activation_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_57 (Conv2D) (None, 7, 7, 160) 179200 activation_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_52 (BatchNo (None, 7, 7, 160) 480 conv2d_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_57 (BatchNo (None, 7, 7, 160) 480 conv2d_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_52 (Activation) (None, 7, 7, 160) 0 batch_normalization_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_57 (Activation) (None, 7, 7, 160) 0 batch_normalization_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_5 (AveragePoo (None, 7, 7, 768) 0 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_50 (Conv2D) (None, 7, 7, 192) 147456 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_53 (Conv2D) (None, 7, 7, 192) 215040 activation_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_58 (Conv2D) (None, 7, 7, 192) 215040 activation_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_59 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_50 (BatchNo (None, 7, 7, 192) 576 conv2d_50[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_53 (BatchNo (None, 7, 7, 192) 576 conv2d_53[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_58 (BatchNo (None, 7, 7, 192) 576 conv2d_58[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_59 (BatchNo (None, 7, 7, 192) 576 conv2d_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_50 (Activation) (None, 7, 7, 192) 0 batch_normalization_50[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_53 (Activation) (None, 7, 7, 192) 0 batch_normalization_53[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "activation_58 (Activation) (None, 7, 7, 192) 0 batch_normalization_58[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_59 (Activation) (None, 7, 7, 192) 0 batch_normalization_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed6 (Concatenate) (None, 7, 7, 768) 0 activation_50[0][0] \n", + " activation_53[0][0] \n", + " activation_58[0][0] \n", + " activation_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_64 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_64 (BatchNo (None, 7, 7, 192) 576 conv2d_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_64 (Activation) (None, 7, 7, 192) 0 batch_normalization_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_65 (Conv2D) (None, 7, 7, 192) 258048 activation_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_65 (BatchNo (None, 7, 7, 192) 576 conv2d_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_65 (Activation) (None, 7, 7, 192) 0 batch_normalization_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_61 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_66 (Conv2D) (None, 7, 7, 192) 258048 activation_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_61 (BatchNo (None, 7, 7, 192) 576 conv2d_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_66 (BatchNo (None, 7, 7, 192) 576 conv2d_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_61 (Activation) (None, 7, 7, 192) 0 batch_normalization_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_66 (Activation) (None, 7, 7, 192) 0 batch_normalization_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_62 (Conv2D) (None, 7, 7, 192) 258048 activation_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_67 (Conv2D) (None, 7, 7, 192) 258048 activation_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_62 (BatchNo (None, 7, 7, 192) 576 conv2d_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_67 (BatchNo (None, 7, 7, 192) 576 conv2d_67[0][0] 
\n", + "__________________________________________________________________________________________________\n", + "activation_62 (Activation) (None, 7, 7, 192) 0 batch_normalization_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_67 (Activation) (None, 7, 7, 192) 0 batch_normalization_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_6 (AveragePoo (None, 7, 7, 768) 0 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_60 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_63 (Conv2D) (None, 7, 7, 192) 258048 activation_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_68 (Conv2D) (None, 7, 7, 192) 258048 activation_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_69 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_60 (BatchNo (None, 7, 7, 192) 576 conv2d_60[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_63 (BatchNo (None, 7, 7, 192) 576 conv2d_63[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_68 (BatchNo (None, 7, 7, 192) 576 conv2d_68[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_69 (BatchNo (None, 7, 7, 192) 576 conv2d_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_60 (Activation) (None, 7, 7, 192) 0 batch_normalization_60[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_63 (Activation) (None, 7, 7, 192) 0 batch_normalization_63[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_68 (Activation) (None, 7, 7, 192) 0 batch_normalization_68[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_69 (Activation) (None, 7, 7, 192) 0 batch_normalization_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed7 (Concatenate) (None, 7, 7, 768) 0 activation_60[0][0] \n", + " activation_63[0][0] \n", + " activation_68[0][0] \n", + " activation_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "flatten (Flatten) (None, 37632) 0 mixed7[0][0] \n", + "__________________________________________________________________________________________________\n", + "dense (Dense) (None, 1024) 38536192 flatten[0][0] \n", + "__________________________________________________________________________________________________\n", + "dropout (Dropout) (None, 1024) 0 dense[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "dense_1 (Dense) (None, 1) 1025 dropout[0][0] \n", + "==================================================================================================\n", + "Total params: 47,512,481\n", + "Trainable params: 38,537,217\n", + "Non-trainable params: 8,975,264\n", + "__________________________________________________________________________________________________\n" + ] + } + ], + "source": [ + "from tensorflow.keras.optimizers import RMSprop\n", + "\n", + "# Flatten the output layer to 1 dimension\n", + "x = layers.Flatten()(last_output)\n", + "# Add a fully connected layer with 1,024 hidden units and ReLU activation\n", + "x = layers.Dense(1024, activation = 'relu')(x)# Your Code Here)(x)\n", + "# Add a dropout rate of 0.2\n", + "x = layers.Dropout(0.2)(x)# Your Code Here)(x) \n", + "# Add a final sigmoid layer for classification\n", + "x = layers.Dense(1, activation = 'sigmoid')(x)# Your Code Here)(x) \n", + "\n", + "model = Model(pre_trained_model.input, x) # Your Code Here, x) \n", + "\n", + "model.compile(optimizer = RMSprop(lr=0.0001), \n", + " loss = 'binary_crossentropy', # Your Code Here, \n", + " metrics = ['acc']) # Your Code Here)\n", + "\n", + "model.summary()\n", + "\n", + "# Expected output will be large. Last few lines should be:\n", + "\n", + "# mixed7 (Concatenate) (None, 7, 7, 768) 0 activation_248[0][0] \n", + "# activation_251[0][0] \n", + "# activation_256[0][0] \n", + "# activation_257[0][0] \n", + "# __________________________________________________________________________________________________\n", + "# flatten_4 (Flatten) (None, 37632) 0 mixed7[0][0] \n", + "# __________________________________________________________________________________________________\n", + "# dense_8 (Dense) (None, 1024) 38536192 flatten_4[0][0] \n", + "# __________________________________________________________________________________________________\n", + "# dropout_4 (Dropout) (None, 1024) 0 dense_8[0][0] \n", + "# __________________________________________________________________________________________________\n", + "# dense_9 (Dense) (None, 1) 1025 dropout_4[0][0] \n", + "# ==================================================================================================\n", + "# Total params: 47,512,481\n", + "# Trainable params: 38,537,217\n", + "# Non-trainable params: 8,975,264\n" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "HrnL_IQ8knWA" + }, + "outputs": [], + "source": [ + "# Get the Horse or Human dataset\n", + "path_horse_or_human = f\"{getcwd()}/../tmp2/horse-or-human.zip\"\n", + "# Get the Horse or Human Validation dataset\n", + "path_validation_horse_or_human = f\"{getcwd()}/../tmp2/validation-horse-or-human.zip\"\n", + "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", + "\n", + "import os\n", + "import zipfile\n", + "import shutil\n", + "\n", + "shutil.rmtree('/tmp')\n", + "local_zip = path_horse_or_human\n", + "zip_ref = zipfile.ZipFile(local_zip, 'r')\n", + "zip_ref.extractall('/tmp/training')\n", + "zip_ref.close()\n", + "\n", + "local_zip = path_validation_horse_or_human\n", + "zip_ref = zipfile.ZipFile(local_zip, 'r')\n", + "zip_ref.extractall('/tmp/validation')\n", + "zip_ref.close()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "y9okX7_ovskI" + }, + "outputs": [ + { + "name": 
"stdout", + "output_type": "stream", + "text": [ + "500\n", + "527\n", + "128\n", + "128\n" + ] + } + ], + "source": [ + "# Define our example directories and files\n", + "train_dir = '/tmp/training'\n", + "validation_dir = '/tmp/validation'\n", + "\n", + "train_horses_dir = os.path.join(train_dir, 'horses') # Your Code Here\n", + "train_humans_dir = os.path.join(train_dir, 'humans')# Your Code Here\n", + "validation_horses_dir = os.path.join(validation_dir, 'horses')# Your Code Here\n", + "validation_humans_dir = os.path.join(validation_dir, 'humans')# Your Code Here\n", + "\n", + "train_horses_fnames = os.listdir(train_horses_dir)# Your Code Here\n", + "train_humans_fnames = os.listdir(train_humans_dir)# Your Code Here\n", + "validation_horses_fnames = os.listdir(validation_horses_dir)# Your Code Here\n", + "validation_humans_fnames = os.listdir(validation_humans_dir)# Your Code Here\n", + "\n", + "print(len(os.listdir('/tmp/training/horses/'))) # Your Code Here)\n", + "print(len(os.listdir('/tmp/training/humans/'))) # Your Code Here)\n", + "print(len(os.listdir('/tmp/validation/horses/'))) # Your Code Here)\n", + "print(len(os.listdir('/tmp/validation/humans/'))) # Your Code Here)\n", + "\n", + "# Expected Output:\n", + "# 500\n", + "# 527\n", + "# 128\n", + "# 128" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "O4s8HckqGlnb" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 1027 images belonging to 2 classes.\n", + "Found 256 images belonging to 2 classes.\n" + ] + } + ], + "source": [ + "# Add our data-augmentation parameters to ImageDataGenerator\n", + "train_datagen = ImageDataGenerator(rescale = 1./255.,\n", + " rotation_range = 40,\n", + " width_shift_range = 0.2,\n", + " height_shift_range = 0.2,\n", + " shear_range = 0.2,\n", + " zoom_range = 0.2,\n", + " horizontal_flip = True)# Your Code Here)\n", + "\n", + "# Note that the validation data should not be augmented!\n", + "test_datagen = ImageDataGenerator(rescale = 1./255.,\n", + " rotation_range = 40,\n", + " width_shift_range = 0.2,\n", + " height_shift_range = 0.2,\n", + " shear_range = 0.2,\n", + " zoom_range = 0.2,\n", + " horizontal_flip = True)# Your Code Here )\n", + "\n", + "# Flow training images in batches of 20 using train_datagen generator\n", + "train_generator = train_datagen.flow_from_directory(train_dir,\n", + " batch_size = 20,\n", + " class_mode = 'binary', \n", + " target_size = (150, 150)) # Your Code Here) \n", + "\n", + "# Flow validation images in batches of 20 using test_datagen generator\n", + "validation_generator = test_datagen.flow_from_directory(validation_dir,\n", + " batch_size = 20,\n", + " class_mode = 'binary', \n", + " target_size = (150, 150)) # Your Code Here)\n", + "\n", + "# Expected Output:\n", + "# Found 1027 images belonging to 2 classes.\n", + "# Found 256 images belonging to 2 classes." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Blhq2MAUeyGA" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/3\n", + "100/100 - 84s - loss: 0.2142 - acc: 0.9230 - val_loss: 0.2558 - val_acc: 0.9495\n", + "Epoch 2/3\n", + "100/100 - 80s - loss: 0.0604 - acc: 0.9738 - val_loss: 0.0426 - val_acc: 0.9823\n", + "Epoch 3/3\n", + "100/100 - 79s - loss: 0.0489 - acc: 0.9822 - val_loss: 0.4234 - val_acc: 0.9369\n" + ] + } + ], + "source": [ + "# Run this and see how many epochs it should take before the callback\n", + "# fires, and stops training at 97% accuracy\n", + " \n", + "callbacks = myCallback() # Your Code Here\n", + "history = model.fit_generator(\n", + " train_generator,\n", + " validation_data = validation_generator,\n", + " steps_per_epoch = 100,\n", + " epochs = 3,\n", + " validation_steps = 20,\n", + " verbose = 2)# Your Code Here (set epochs = 3))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "C2Fp6Se9rKuL" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAEICAYAAABRSj9aAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO2dd3gUZdeH70PvTRAQpKmvSgslAipVQEEFpCgiiKiADQuKih2xoaJiF0UQLBRRKQqCtBd5+VACElAQQUVp0qvUwPP9cSZhExKyCZvM7ubc15VrZ2eemfnt7OS3Z55yHnHOYRiGYUQvufwWYBiGYWQtZvSGYRhRjhm9YRhGlGNGbxiGEeWY0RuGYUQ5ZvSGYRhRjhl9DkREcovIfhGpFMqyfiIi54pIyPsKi0grEVkX8H61iDQJpmwmzjVCRB7N7P6GkRZ5/BZgpI+I7A94Wwg4DBzz3t/mnPs0I8dzzh0DioS6bE7AOXd+KI4jIr2BHs655gHH7h2KYxtGSszoIwDnXJLRehFjb+fcrLTKi0ge51xCdmgzjPSw+9F/rOomChCRZ0VkvIiMFZF9QA8RuVhEFonIbhHZLCJviEher3weEXEiUsV7/4m3fbqI7BOR/xORqhkt621vKyK/icgeEXlTRP4nIr3S0B2MxttEZK2I7BKRNwL2zS0ir4nIDhH5A2hziuvzmIiMS7HubRF51VvuLSKrvM/zuxdtp3WsDSLS3FsuJCIfe9p+AeqnKPu4iPzhHfcXEWnvra8FvAU08arFtgdc20EB+9/uffYdIjJJRMoHc20ycp0T9YjILBHZKSL/iMhDAed5wrsme0UkTkTOSq2aTEQWJH7P3vWc751nJ/C4iJwnInO9c2z3rlvxgP0re59xm7f9dREp4Gm+MKBceRE5ICJnpPV5jVRwztlfBP0B64BWKdY9CxwB2qE/3gWBi4CG6FNbNeA3oJ9XPg/ggCre+0+A7UAskBcYD3ySibJnAvuADt62+4GjQK80PkswGicDxYEqwM7Ezw70A34BKgJnAPP1dk71PNWA/UDhgGNvBWK99+28MgJcBhwEanvbWgHrAo61AWjuLQ8F5gElgcrAyhRlrwPKe9/JDZ6Gst623sC8FDo/AQZ5y5d7GusABYB3gDnBXJsMXufiwBbgXiA/UAxo4G17BIgHzvM+Qx2gFHBuymsNLEj8nr3PlgDcAeRG78f/AC2BfN598j9gaMDn+dm7noW98pd6294Hngs4zwPAV37/H0ban+8C7C+DX1jaRj8nnf0GAJ97y6mZ93sBZdsDP2ei7C3A9wHbBNhMGkYfpMZGAdu/BAZ4y/PRKqzEbVemNJ8Ux14E3OAttwVWn6Ls18Bd3vKpjP7vwO8CuDOwbCrH/Rm4yltOz+hHA88HbCuGtstUTO/aZPA63wgsTqPc74l6U6wPxuj/SEdDl8TzAk2Af4DcqZS7FPgTEO/9MqBTqP+vov3Pqm6ih/WBb0TkAhH5xnsU3wsMBkqfYv9/ApYPcOoG2LTKnhWow+l/5oa0DhKkxqDOBfx1Cr0AnwHdvOUbvPeJOq4WkR+8aoXdaDR9qmuVSPlTaRCRXiIS71U/7AYuCPK4oJ8v6XjOub3ALqBCQJmgvrN0rvPZqKGnxqm2pUfK+7GciEwQkY2eho9SaFjntOE/Gc65/6FPB41FpCZQCfgmk5pyLGb00UPKroXD0QjyXOdcMeBJNMLOSjajEScAIiIkN6aUnI7GzahBJJJe988JQCsRqYBWLX3maSwITAReQKtVSgAzg9TxT1oaRKQa8C5afXGGd9xfA46bXlfQTWh1UOLxiqJVRBuD0JWSU13n9cA5aeyX1rZ/PU2FAtaVS1Em5ed7Ee0tVsvT0CuFhsoikjsNHWOAHujTxwTn3OE0yhlpYEYfvRQF9gD/eo1Zt2XDOb8G6olIOxHJg9b7lskijROA+0Skgtcw9/CpCjvn/kGrFz5Cq23WeJvyo/XG24BjInI1WpccrIZHRaSE6DiDfgHbiqBmtw39zeuDRvSJbAEqBjaKpmAscKuI1BaR/OgP0ffOuTSfkE7Bqa7zFKCSiPQTkfwiUkxEGnjbRgDPisg5otQRkVLoD9w/aKN/bhHpS8CP0ik0/AvsEZGz0eqjRP4P2AE8L9rAXVBELg3Y/jFa1XMDavpGBjGjj14eAG5CG0eHo42mWYpzbgvQFXgV/cc9B/gJjeRCrfFdYDawAliMRuXp8Rla555UbeOc2w30B75CGzS7oD9YwfAU+mSxDphOgAk555YDbwI/emXOB34I2Pc7YA2wRUQCq2AS9/8WrWL5ytu/EtA9SF0pSfM6O+f2AK2BzuiPz29AM2/zy8Ak9D
rvRRtGC3hVcn2AR9GG+XNTfLbUeApogP7gTAG+CNCQAFwNXIhG93+j30Pi9nXo93zYObcwg5/d4EQDh2GEHO9RfBPQxTn3vd96jMhFRMagDbyD/NYSidiAKSOkiEgbtIfLQbR73lE0qjWMTOG1d3QAavmtJVKxqhsj1DQG/kDrpq8AOlrjmZFZROQFtC//8865v/3WE6lY1Y1hGEaUYxG9YRhGlBN2dfSlS5d2VapU8VuGYRhGRLFkyZLtzrlUuzOHndFXqVKFuLg4v2UYhmFEFCKS5uhwq7oxDMOIcszoDcMwohwzesMwjCjHjN4wDCPKMaM3DMOIcszoDcMwohwzesMwjCjHjN4wgsA5+OwzWLbMbyVGVHPspEm2QkLYDZgyjHBj61bo1QumT4ciRWDGDLjkEr9VGRGNc/DXXxAfr3/Ll+tr1aowc2bIT2dGbxin4Ntv4aabYM8eGDIEPvwQrrhC1196afr7Gwb//gs//5zc0Jcvh717dbsInHMOxMRA48ZZIsGM3jBS4fBhGDgQhg2DmjVh9mx9vfFGaNEC2rTRCD+L/i+NSMQ5+Pvvk6P0tWt1G0DRolC7NvTooa8xMXpjFUl1XveQYUZvGClYtQq6ddP/0bvvhhdfhIIFddtZZ8G8ecnNvkkTX+UafnDgQOpR+p49J8okRundu+trTAxUrgy5sr9p1IzeMDycg/ffh/79oXBhmDoVrr765HLly8PcuXDZZdC2LUybBk2bZr9eIxtwDtavT27o8fGwZs2JKL1IEY3Ou3U7Yeg1a2r0HiYEZfTe9HCvA7mBEc65ISm2VwZGAmXQCZZ7JM5WLyIvAVehPXy+A+51NtuJEWbs2AF9+sBXX0Hr1jB6tBp6WiSafYsWcOWVZvZRwcGDqUfpu3efKFOtmhp5oKlXqeJLlJ4R0jV6b4Lnt9GZ4jcAi0VkinNuZUCxocAY59xoEbkMeAG4UUQuAS4FanvlFqAzzM8L3UcwjNNj7lyte9+6FYYO1Yg+mP/bcuVOjuybNct6vcZp4hxs2JB6lH78uJYpXFij9K5dk0fpxYr5qz2TBBPRNwDWOuf+ABCRcehEvYFGXx2431ueC0zylh1QAMgHCJAX2HL6sg3j9Dl6FJ58UuvgzzsPFi2CevUydoxAs7/ySvjmG2jePEvkGpnh4EH45Zfkhr58OezadaJM1apq5ImmXru2Ru5hHqVnhGCMvgKwPuD9BqBhijLxQCe0eqcjUFREznDO/Z+IzAU2o0b/lnNuVcoTiEhfoC9ApUqVMvwhDCOjrF0LN9wAixdD797au6Zw4cwdq2xZmDMnudm3aBFavUY6OAcbN54cpf/2W/IovVYtuPbaE1F6rVoRG6VnhFA1xg4A3hKRXsB8YCNwTETOBS4EKnrlvhORJs657wN3ds69D7wPEBsba/X3RpbhHIwZA/36Qd68MHEidO58+sctW/ZEZH/VVfD117psZAGHDqUepe/ceaJMlSoamSeaeu3a2gsmiqL0jBCM0W8Ezg54X9Fbl4RzbhMa0SMiRYDOzrndItIHWOSc2+9tmw5cDCQzesPIDnbvhjvugHHjtC7944/h7LPT3y9YzjxTI/uWLbW3ztSpumxkEudg06aTG0dXrz6RKqBQIY3KO3dOHqUXL+6v9jAjGKNfDJwnIlVRg78euCGwgIiUBnY6544Dj6A9cAD+BvqIyAto1U0zYFiItBtG0CxcqFU1GzbAs8/qYKjcuUN/ntTMvlWr0J8n6jh0CFauPDlK37HjRJnKlTUy79QpeZSeFV9klJGu0TvnEkSkHzAD7V450jn3i4gMBuKcc1OA5sALIuLQqpu7vN0nApcBK9CG2W+dc1ND/zEMI3USEuD55+Hpp9UnFiyARo2y9pxlyuhI2pYtoV07mDJFu2waaJS+efPJhv7rryei9IIFtYdLx44nDL12bShRwl/tEYyEW5f22NhYFxcX57cMIwr46y8dlPi//+mI87ffzt52t+3b1ex/+w0mT4bLL8++c4cFhw8nj9ITX7dvP1GmUqUTqQASTf3ccy1KzwQissQ5F5vaNhsZa0Ql48fDbbdph4uPP1ajz25Kl9bIvlUraN9eI/uoNHvn4J9/Uo/SExK0TIECGqV36JA8Si9Z0l/tOQQzeiOq2LcP7rkHPvpIq2g+/VS7RPtFSrOfPFmzX0YsR45oMqCUibu2bTtR5uyz1cTbtz8RrZ93nkXpPmJGb0QNixdrg+sff8ATT+hf3rx+q4IzzoBZs7SevkMHmDRJE6KFPalF6atWnYjS8+fXKL1du+RReqlS/uo2TsKM3oh4jh/X1AWPPXZipGq45Z1JNPtWrU6Yfdu2fqvyOHJEq1lSRulbt54oU6GCmvnVVyeP0vOYhUQC9i0ZEc3GjdCzp3Zp7NJFs0+Ga7VvqVInIvtrrtEEaldemc0itmw5uXF01SrNBwEapdeooaO+Eg29dm39pTIiFjN6I2KZPBluvVXTmYwYAbfcopP1hDOlSsF336nZd+wIX36pnhpyjh5NPUrfEpBq6qyz1MivvPKEqf/nPxalRyH2jRoRx4ED8MAD8N57ULcujB0L55/vt6rgCYzsO3WCL75IPe990GzbdrKhr1x5IkrPl0+j9LZtk0fppUuH5PMY4Y8ZvRFRxMdrKvBVq2DAAB3lmj+/36oyTsmSJ5t9u3bp7HT0qA7/T5m4659/TpQpX16NvE2b5FF6OLRKG75hRm9EBM7BG2/AQw9pRDxzZuSPNi1RQqtxLr9cU7VMnKg9EgEdVJRalH7kiG7Plw+qV9e+moFRepkyvn0eI3wxozfCnq1boVcvnZ/16qth5Mjo8bMSRRKY+foarrixDF06luDzmOfo8M9wTROQSLlyauStW58YQXr++RalG0FjRm+ENd9+qya/eze89RbceWf4N7imyY4dqUbpJQ4fZibFuEJmcu2yR/m8WXE6PHDsRJR+5pl+KzciHDN6Iyw5fBgeeQRee03H5Myapa8RQUKCJrhJWZe+adOJMmXLqpHffTfExFA8JoYZ5c6nTfu8dFlwHxPugY6W9dIIEWb0RtixapU2uMbH6wQhL72kCQ3Dkp07Tzb0X37RXyrQ6pULL9RZSAITd5Ute9KhigMzZmi1+3XXab6eTp2y9+MY0YkZvRE2OKcDnvr311nfpk49zW6HoWbzZpg/P3n1y4YNJ7afeaYaeb9+J0z9ggu04TRIihVTs2/TRqcwHTcuNDNgGTkbM3ojLNixA/r00dGirVvD6NHaUzBsiI/XvAp79+qAogsv1GmqAqP0cuVCcqpixbRtom1bNfvx483sjdPDjN7wnblz4cYbtXfN0KEa0YfV1J7r1qnrFium/Trr1MnyzvspzX7sWJ3+1DAyQzj9Oxk5jKNH4dFHdXKOwoVh0SId8RpWJr99u9ajHDyoztuwYbaN0CpaVLuUXnyxtllMmJAtpzWiEIvoDV9Yu1ZTCi9eDL17w7BhavZhxb//aiPBX3/pyKYaNbJdQtGiMG2apqO54QZtx+jaNdtlGBFOOMVORg7AOa1/r1sX1qyBzz+HDz4IQ
5NPSFBHXbxY600aN/ZNSmJkf8klOjXi+PG+STEiFDN6I9vYvVuj0l69oF497bTSpYvfqlLBOZ2H8JtvdKLZa67xWxFFimhkf+mleg3HjfNbkRFJmNEb2cLChdqG+fnnmohszhydcS4sefJJzbPw5JNw++1+q0ki0eybNNHI/rPP/FZkRApm9EaWkpAAgwerOeXKBQsW6ExQYTt96Dvv6C9R794waJDfak6icGF90GjaVHsqffqp34qMSMAaY40s46+/oEcPNfcePbQWpFgxv1Wdgi+/1MFO7drBu++GbVKdwoXh669VZs+eWtPUo4ffqoxwxiJ6I0uYMEHHEcXHw8cf619Ym/z8+Vr53aiRVoCH+SxLiWbfrBncdBN88onfioxwJiijF5E2IrJaRNaKyMBUtlcWkdkislxE5olIRW99CxFZFvB3SET8b9kysoz9+3VKv65ddfT/smUREG2uWKGJ4KtW1bwLhQr5rSgoChVSs2/eXCP7MWP8VmSEK+kavYjkBt4G2gLVgW4iUj1FsaHAGOdcbWAw8AKAc26uc66Oc64OcBlwAJgZQv1GGLF4sXab/OgjePxx+P57qFbNb1Xp8PffOvy0cGEdEBVhk2AXKqS/TS1bam+m0aP9VmSEI8FE9A2Atc65P5xzR4BxQIcUZaoDc7zlualsB+gCTHfOHcisWCM8OX5cM0xecgkcOgTz5sEzz0TAvBg7d+qo1/371eQrV/ZbUaYoVAimTFGzv/lm/aE1jECCMfoKwPqA9xu8dYHEA4kJVTsCRUUkZWh0PTA2tROISF8RiRORuG3btgUhyQgXNm7UJGQPP6zdzZcv1x4hYc/Bg9qa+fvvMHky1Krlt6LTomBBNftWrbTqbNQovxUZ4USoGmMHAM1E5CegGbAROJa4UUTKA7WAGant7Jx73zkX65yLLRMtc8TlACZP1gbXRYtgxAhtgC1Z0m9VQZCQANdfD//3f9o/sVkzvxWFhIIF9Ttp3RpuvVWHAhgGBGf0G4HAoS0VvXVJOOc2Oec6OefqAo9563YHFLkO+Mo5d/Q09RphwIEDOqXfNddApUqwdKkaS5j2RkyOc3DXXRr+vvFGmA7NzTyJZn/55ToU4MMP/VZkhAPBGP1i4DwRqSoi+dAqmCmBBUSktIgkHusRIGUs0Y00qm2MyGL5crjoIu1mPmCABsXnn++3qgwweLDObvLoo9pnPgopUAAmTdKZqnr31qctI2eTrtE75xKAfmi1yypggnPuFxEZLCLtvWLNgdUi8htQFngucX8RqYI+Efw3pMqNbMU5DYAbNNA2zJkz4eWXsy1jb2h4/30d7dqrl45+jWIKFNBJXNq21QldPvjAb0WGn4hzzm8NyYiNjXVxcXF+yzAC2LpVvXH6dM3aO3IkRFxTyuTJOgFrmzYa7oZ9l6DQcOiQzk41bRoMHw59+/qtyMgqRGSJcy42tW02MtY4Jd9+q7PkzZkDb72lVdsRZ/L/+582vsbGaotxDjF50Mj+yy/hqqs0Iefw4X4rMvzAjN5IlcOH4f779dG/TBmIi9M2zIhocA1k5UrtRlmpkmYDC7vE91lP/vzwxRf6NHb77dq+YuQszOiNk1i1SmfMe+01ba/88UeoWdNvVZlgwwatqsmfXx9NSpf2W5Fv5M8PEyfqb96dd2qSTiPnYEZvJOGctlfWr68DoaZOhTff1C57EceuXfo4snu3Ni5Ureq3It/Jn1/nA2jXTp/O3n7bb0VGdmFGbwCwY4c22t12m86at3y5PupHJIcOQYcOsHq1NrzWqeO3orAhMbJv316f1szscwZm9AZz5+oI16+/hqFDtZajfHm/VWWSY8d0+qXvv9fcyJdd5reisCNfPo3sO3RQs3/zTb8VGVmNGX0O5uhRHTfUsqW2US5aBA88oDNBRSTOwT33aDeTYcM0V7KRKvnyaQekjh31kr3xht+KjKwkvGdXMLKMtWt1no3Fi3X05LBhUdAh5fnntZXxoYfg3nv9VhP25MsH48fr7+G99+rvpF226CRSYzcjkzinE1TUrQtr1ugj/AcfRIHJjxypSfBvvBFeeMFvNRFD3rxq9p06wX336Q++EX2Y0ecg9uzR6uubboJ69bTBNSpyen39tQ75vOIKzeIVsXVP/pA3r86e2Lkz9O+v3WqN6ML+I3IICxdq55MJEzTNy5w5cPbZ6e8X9ixaBNddp48oEyfmqFGvoSRvXhg7Fq69VgfKvfKK34qMUGJ19FFOQoJWXT/9tE6gtGCBzn8dFfz6q47tP+ssHfVapIjfiiKavHk1PT9oZlLn9NWIfMzoo5i//tKJuRcs0Ne334ZixfxWFSI2bdJRr3nywIwZcOaZfiuKCvLmhc8+01QXDz6oZv/gg36rMk4XM/ooZcIErbY+fly7k/fo4beiELJnj4563bFDJ6g95xy/FUUVefJoZC+iHZic01cjcjGjjzL279d+0aNGab6azz6DatX8VhVCDh/Wqa1WrtTcu/Xr+60oKsmTBz75RM3+4Yc1YBg40G9VRmYxo48i4uK0b/zatdrT8Mkno6xt8vhx7T45b566UOvWfiuKavLk0afBXLngkUc0sn/kEb9VGZnBjD4KOH5cUxc89hiUK6cpDaJkvusTOKcdvT//XD9s9+5+K8oR5MkDo0fr8qOP6tfw6KP+ajIyjhl9hLNxI/Tsqd0lu3TRiSVKlfJbVRbw0kualOX++zVPg5Ft5Mmjg+xENJhwTl+NyMGMPoKZPBluvRUOHtQJoG+5JQInBgmG0aO1grhbN52o1sh2cufWryFXLq0WdE5fjcjAjD4COXBA+ze/+66OExo7Fs4/329VWcT06fpr1qoVfPSRjXr1kdy5tZFfBJ54QqsMn3zSb1VGMJjRRxjLl2tgu3Klmv2zz2qO8ajkxx+1Pqp2bZ0LL18+vxXleHLn1rRCIvDUUxrZP/WU36qM9DCjjxCc0yrqhx6CkiVh5swo73SyZo2Oei1bVrtRRs1Ir8gnd25NKSQCgwbpvTlokN+qjFNhRh8BbN0KN9+sfnf11RpRlSnjt6os5J9/NEEZ6KjXcuX81WOcRO7c2i4kouk1Es0+KtuIogAz+jDn22+hVy+d+vStt3Ri56j+Z9q7F668ErZs0f7y553ntyIjDQLNfvBgNfunn47y+zNCCaplS0TaiMhqEVkrIieNjxORyiIyW0SWi8g8EakYsK2SiMwUkVUislJEqoROfvRy+LD2JGzbVqP3uDid0Dmq/4mOHNHE6CtWaJ38RRf5rchIh1y5dD6DW2+FZ57Rxlnn/FZlpCTdiF5EcgNvA62BDcBiEZninFsZUGwoMMY5N1pELgNeAG70to0BnnPOfSciRYDjIf0EUciqVdrgGh+vc3q+9BIULOi3qizm+HF9dJk9W/vxtWnjtyIjSHLlgvff1yDk2WfV6J95JsqDkggjmKqbBsBa59wfACIyDugABBp9deB+b3kuMMkrWx3I45z7DsA5tz9EuqMS5zQ6uu8+nfFp6lStk88RPPig9hMdMkRHgBkRRa5cOlhPBJ57Tu/lZ581sw8XgjH6
CsD6gPcbgIYpysQDnYDXgY5AURE5A/gPsFtEvgSqArOAgc65Y6crPNrYsQP69IGvvtLeNKNHQ/nyfqvKJl55BV59VbOxWZrEiCVXLnjvPTX3559Xs3/uOTP7cCBUjbEDgLdEpBcwH9gIHPOO3wSoC/wNjAd6AR8G7iwifYG+AJUqVQqRpMhh7lzN1bV1q6Zx6d8/B40L+uQTHRBw3XU6h525QkSTK5cO5MuVS6fuPX5cX+1r9ZdgjH4jEDjpXEVvXRLOuU1oRI9XD9/ZObdbRDYAywKqfSYBjUhh9M6594H3AWJjY3NMU87RozrYZMgQ7VyyaJHO5ZpjmDlT+402b67JVHLMr1t0kyuXTnIjAi++qJH9kCFm9n4SjNEvBs4TkaqowV8P3BBYQERKAzudc8eBR4CRAfuWEJEyzrltwGVAXKjERzJr12pK4cWLoXdvGDZM6+VzDHFx2sOmenWYNCmKh/fmTBLNHrQzgXNq+mb2/pCu0TvnEkSkHzADyA2MdM79IiKDgTjn3BSgOfCCiDi06uYub99jIjIAmC0iAiwBPsiajxIZOKc5vu+6S7MCfv65jvLPUaxdq33lS5fWXDbFi/utyMgCRE5E9i+/rPf+Sy+Z2ftBUHX0zrlpwLQU654MWJ4ITExj3++A2qehMWrYswfuuEM7lzRtqtXTZ5+d/n5RxZYt2nXy+HEd9XrWWX4rMrIQER3olyuXtj8lzp1gZp+92MjYbGLhQp0rY/167XY2cKCOLMxR7Nun+Ws2bdIE+lGbctMIRATeeENfX31VI/tXXjGzz07M6LOYhATtajZ4MFSqBAsWQKNGfqvygSNHtI5q2TKtk8+RFyHnIgKvv66vr72mZv/qq2b22YUZfRby11/Qo4eae/fu8M47OTQJ4/HjOivKzJma9jDHjAIzAhHRTgeJr85Zj9rswow+i5gwAfr2VY/7+GM1/BzLwIHw6adaZ3XLLX6rMXwkMaIPNPtE8zeyDjP6ELN/vw7wHDUKGjaEzz6DatX8VuUjr72mXS7uvNNmlTaAE3X1uXKdqLNPrNYxsgYz+hASF6d949eu1fk0n3wS8ub1W5WPjBunKTg7dTrRGmcY6K2Q2PvmlVfU7O0WyTrM6ENAYpexxx7TOTLmzoVmzfxW5TOzZ2tysqZNtdomx3UxMtIjsX99ouknzqJmZh96zOhPk02b1M9mz4bOnTVda6lSfqvymZ9+go4dtfvk5MlQoIDfiowwReTEIKqXX9agKbHfvRE6zOhPg8mTdcKFgwdPTL6Q46ORP//U2VJKlNBRryVK+K3ICHMSc+Ikmr5zOqLWzD50mNFnggMHNOHiu+9C3bo60tXG/gDbtulcr0eO6ICoihXT38cwUJMfMkTNfcgQNft33jGzDxVm9Blk+XKd/WnlSjX7Z5+1fFyAdje66iod+jtrliYrM4wMkJjHXkRTGzt3IuWxcXqY0QdJYkPRQw9ByZI69qd1a79VhQlHj2o++SVL4Msv4dJL/VZkRCiJM1QFTl7y3ntm9qeLGX0QbN2qadOnTdNBnSNH6oTdBvqf2KeP1scPHw4dOvityIhwEueeDZyWcPhwM/vTwYw+HWbMgJtugt27tTfAnXdag2syHntM5z0cNEiHAiZScYgAACAASURBVBtGCBDRCcZz5dJX57RHm5l95jCjT4PDh3Ug56uvQo0a8N13UKuW36rCjDff1MrUvn11dJhhhBARePppfR08WM3+gw/M7DODGX0qrFqlI1yXLYN+/bTLV8GCfqsKMz7/HO69V6tqEmeXMIwQE2j2Tz+tZj9ihJl9RjGjDyAxYrjvPp3Wb+pUS7SYKvPmaZa2Sy7RvqV57DYyspZBg/Q10OxtsHXw2H+ox44d2qb41Vfam2b0aChf3m9VYcjy5RrFn3suTJlijzpGtjFokEb2gwbpCNqRI83sg8WMHs1Nc+ON2rtm6FDo398eDVPlr790GsCiReHbby3Xg5HtPPWU/m8++aRG9qNGmdkHQ442+qNH9cYZMgTOOw8WLYJ69fxWFabs2KGjXg8e1JlUctxkt0a48MQTGtk/8YS+N7NPnxxr9L//rg2uP/6oOWqGDYMiRfxWFaYcOKCNFevWafejGjX8VmTkcB5/XM3+8cc1sv/oIzP7U5HjjN45nfHprru0DfHzz3UqUyMNEhKga1f9RZw4EZo08VuRYQA6hENEX48f13Y16xeQOjnqsuzZA3fcoR1FmjaFTz6xGohT4hzcfjt8/bUmHenY0W9FhpGMRx/VOvtHHtHbdcwYM/vUyDGXZOFCnaB7/XodXj1woD3qpctTT+lk3k88oYZvGGHIwIEa2Q8ceOKJ3cw+OVF/ORISNDnS4MFQqZK2IzZq5LeqCODdd3Xs+a23audlwwhjHn5Yzf7hh/W9mX1ygroUItIGeB3IDYxwzg1Jsb0yMBIoA+wEejjnNnjbjgErvKJ/O+fah0h7uvz9t0bxCxbo6zvvQLFi2XX2CObLL7UR4+qrNXWgjXo1IoCHHtJb9aGHtM7+00/N7BNJ9zKISG7gbaA1sAFYLCJTnHMrA4oNBcY450aLyGXAC8CN3raDzrk6IdadLhMmaAqW48f1171Hj+xWEKF8/712R2rYEMaPt/8UI6J48EE1+wcf1GqcTz+FvHn9VuU/wfwXNwDWOuf+ABCRcUAHINDoqwP3e8tzgUmhFJkR9u+He+7RvrUNG+oXfc45fqmJMH7+Gdq3hypVtAG2UCG/FRlGhhkwQBtoH3hAzf6zz8zsgxn/WQFYH/B+g7cukHigk7fcESgqImd47wuISJyILBKRa1I7gYj09crEbdu2LQPykxMXpwOePvpIu1x9/72ZfNCsX6+jXgsW1NzMZ5yR/j6GEabcf79mnp04UWeEO3rUb0X+Eqrn8gHAWyLSC5gPbASOedsqO+c2ikg1YI6IrHDO/R64s3PufeB9gNjYWJcZAb/+ChdfDOXKaUqDZs0y+1FyIDt36qjXffv017FyZb8VGcZp07+/vt5/v5r92LE5N7IPxug3AoG9zSt665Jwzm3Ci+hFpAjQ2Tm329u20Xv9Q0TmAXWBZEYfCi64AF5/Ha6/3lKwZIiDB7W65vffNZKvXdtvRYYRMhLzVt13n477GzcO8uXzW1X2E0zVzWLgPBGpKiL5gOuBKYEFRKS0iCQe6xG0Bw4iUlJE8ieWAS4led1+SLnzTjP5DJGQoKHOwoU6eqx5c78VGUbIufdeDQK/+krN/sgRvxVlP+kavXMuAegHzABWAROcc7+IyGARSewq2RxYLSK/AWWB57z1FwJxIhKPNtIOSdFbx/AL57QL5eTJ+l9w7bV+KzKMLOOee+CNN2DSJJ3HPqeZvTiXqSrxLCM2NtbFxcX5LSP6GTxYR74+8oiOKDOMHMBbb8Hdd+uUChMmRFc1jogscc7FprbNsq7nRD74QE3+ppvguefSL28YUUK/fmr2kydrMsPDh/1WlD2Y0ec0pkzRvDVt26rh26hXI4dx1106zfHUqTnH7M3ocxILF2prVP36mp85p/Y1M3I8d96p6Zy+/ho6d45
+szejzymsWqW5a84+G775Rmc/N4wczO23ayqnb76JfrM3o88JbNyoA6Ly59e+8mXK+K3IMMKC226D4cPV7Dt1gkOH/FaUNVjGqmhn925NbbB7N/z3v1C1qt+KDCOs6NtXm6r69lWz//JLKFDAb1WhxSL6aObQIe1Htnq1jhapW9dvRYYRlvTpo30Tpk/XidSiLbI3o49Wjh3T3Mzz5+v8ai1b+q3IMMKa3r1hxAit3bzmmugyezP6aMQ5Hff9xRfw2muaAMgwjHS59VY1+5kz9WH44EG/FYUGM/po5IUXtKPwgw9qNifDMILmllt0quTvvoseszejjzZGjdJk/D16wJAh6Zc3DOMkbr4ZRo6EWbM0ueuBA34rOj3M6KOJb77RVqXLL9eQJJd9vYaRWXr10rhp9uzIN3tzgmjhhx80A2WdOjqtTjRlazIMn7jpJp2xbs4caNcucs3ejD4aWL0arroKzjpLo/qiRf1WZBhRQ8+eMHq0zlwXqWZvRh/pbNqko15z59Z+YWXL+q3IMKKOG2/UXsrz5mkmkX//9VtRxjCjj2T27NEslNu3w7RpNhO6YWQhPXqo2f/3v5Fn9mb0kcrhwzqqY+VKHbNdv77figwj6uneHT7+WMchXnVV5Ji9GX0kcvy4PkvOm6fdAi6/3G9FhpFjuOEGnWL5++/hyith/36/FaWPGX2k4ZwOgvr8c3j5ZX2eNAwjW+nWDT79FBYsiAyzN6OPNF56Cd58E/r3hwce8FuNYeRYrr8ePvtM5/Np2xb27fNbUdqY0UcSo0fDwIF6hw0datMAGobPdO0KY8fC//1feJu9GX2kMH26Zlxq2VJHcNioV8MIC669Vs1+0aLwNXtzi0jgxx91FuNatbSHTf78fisyDCOAa6+FcePU7Nu0gb17/VaUHDP6cGfNGu3HVbasRvXFivmtyDCMVOjSBcaP17gs3Mw+KKMXkTYislpE1orIwFS2VxaR2SKyXETmiUjFFNuLicgGEXkrVMJzBP/8o6NeAb79FsqV81ePYRinpHNnNfvFi/Vfd88evxUp6Rq9iOQG3gbaAtWBbiJSPUWxocAY51xtYDDwQortzwDzT19uDmLvXu23tWWL5q/5z3/8VmQYRhB06gQTJkBcXPiYfTARfQNgrXPuD+fcEWAc0CFFmerAHG95buB2EakPlAVmnr7cHMKRI3q3LF+umSgbNPBbkWEYGaBjRx3qsmRJeJh9MEZfAVgf8H6Dty6QeKCTt9wRKCoiZ4hILuAVYMCpTiAifUUkTkTitm3bFpzyaOX4cU2EPXu25pRv29ZvRYZhZIJrrtE4belSHby+e7d/WkLVGDsAaCYiPwHNgI3AMeBOYJpzbsOpdnbOve+ci3XOxZYpUyZEkiKUBx/UvlovvKDJsA3DiFg6dFCz/+knf80+TxBlNgJnB7yv6K1Lwjm3CS+iF5EiQGfn3G4RuRhoIiJ3AkWAfCKy3zl3UoOuAbzyCrz6Ktx9Nzz8sN9qDMMIAe3bwxdfaENt69Y68XjJktmrIZiIfjFwnohUFZF8wPXAlMACIlLaq6YBeAQYCeCc6+6cq+Scq4JG/WPM5NPg009hwADtkPvaazbq1TCiiHbtdAjM8uVq9rt2Ze/50zV651wC0A+YAawCJjjnfhGRwSLS3ivWHFgtIr+hDa/PZZHe6GTmTK2Xb95cE17nzu23IsMwQszVV6vZr1gBrVrBzp3Zd25xzmXf2YIgNjbWxcXF+S0j+1iyRA2+WjVNcl28uN+KDMPIQqZN0145NWrArFlQqlRojisiS5xzsalts5GxfvL779pXvlQpHfVqJm8YUc+VV8KkSTpnUHZF9mb0frF1q3awTUjQuV7POstvRYZhZBNt254w+5YtYceOrD2fGb0f7N+vP+ubNsHXX8MFF/ityDCMbKZNG5g8GVat0sg+K83ejD67OXJE+1ktW6bjpC++2G9FhmH4xBVXnDD7li1h+/asOY8ZfXZy/LjmlJ85E4YP12Z4wzByNFdcAVOmwOrVGuUfOxb6cwQzYMoIFY88orMKP/OMGr5hGAY6anbqVJ20JCt6V5vRZxfDhul8r3fcAY895rcawzDCjFatsu7YVnWTHYwbp5N5d+qkE3vbqFfDMLIRM/qsZs4c6NkTmjTRNAc26tUwjGzGjD4rWbZMc5X+5z/atF6ggN+KDMPIgZjRZxV//qmjIooX12kAsztdnWEYhoc1xmYF27Zpn6nDh3UCkYoV09/HMAwjizCjDzX//qv949ev14xF1VNOr2sYhpG9mNGHkqNH4brrdFbgL76ASy/1W5FhGIYZfchwDvr21Ryk772njbCGYRhhgDXGhorHH4ePPoKnnoLbbvNbjWEYRhJm9KHgrbfg+eehTx81esMwjDDCjP50mTgR7rlHZwB+5x0b9WoYRthhRn86zJsH3btrquGxYyGPNXkYhhF+mNFnluXLoUMHOOccTTtXqJDfigzDMFLFjD4z/PWXjnotUkRHvYZqdl/DMIwswOoaMsqOHTo7wL//wvffQ6VKfisyDMM4JWb0GeHAAWjXTvPYzJgBtWr5rcgwDCNdzOiDJSEBrr8eFi2Czz+HZs38VmTkAI4ePcqGDRs4dOiQ31KMMKFAgQJUrFiRvHnzBr1PUEYvIm2A14HcwAjn3JAU2ysDI4EywE6gh3Nug7f+K7QtIC/wpnPuvaDVhQvO6cxQU6dqF8rOnf1WZOQQNmzYQNGiRalSpQpiXXdzPM45duzYwYYNG6hatWrQ+6XbGCsiuYG3gbZAdaCbiKTM1DUUGOOcqw0MBl7w1m8GLnbO1QEaAgNF5Kyg1YULgwbBiBE6+vWOO/xWY+QgDh06xBlnnGEmbwAgIpxxxhkZfsILptdNA2Ctc+4P59wRYBzQIUWZ6sAcb3lu4nbn3BHn3GFvff4gzxdevPceDB4Mt9yir4aRzZjJG4Fk5n4IxngrAOsD3m/w1gUSD3TyljsCRUXkDE/U2SKy3DvGi865TRlW6RdffQV33aVph4cPt1GvhmFEJKGKsAcAzUTkJ6AZsBE4BuCcW+9V6ZwL3CQiZVPuLCJ9RSROROK2bdsWIkmnyYIF0K0bNGgA48fbqFcjR7Jjxw7q1KlDnTp1KFeuHBUqVEh6f+TIkaCOcfPNN7N69epTlnn77bf59NNPQyHZSAVxzp26gMjFwCDn3BXe+0cAnHMvpFG+CPCrc+6kaZVEZCQwzTk3Ma3zxcbGuri4uOA/QVbwyy/QuDGULauGX7q0v3qMHMuqVau48MIL/ZYBwKBBgyhSpAgDBgxItt45h3OOXLkir2b2dEhISCCPTwFgaveFiCxxzsWmVj6Yb2YxcJ6IVBWRfMD1wJQUJygtIonHegTtgYOIVBSRgt5ySaAxcOqfdr9Zv14HRBUsqKNezeSNcOG++6B589D+3XdfpqSsXbuW6tWr0717d2rUqMHmzZvp27cvsbGx1KhRg8EB7VmNGzdm2bJlJCQkUKJECQYOHEhMTAwXX3wxW7duBeDxxx9n2LBhSeUHDhxIgwYNOP/881m4cCEA//77L507d6
Z69ep06dKF2NhYli1bdpK2p556iosuuoiaNWty++23kxjM/vbbb1x22WXExMRQr1491q1bB8Dzzz9PrVq1iImJ4bHHHkumGeCff/7h3HPPBWDEiBFcc801tGjRgiuuuIK9e/dy2WWXUa9ePWrXrs3XX3+dpGPUqFHUrl2bmJgYbr75Zvbs2UO1atVISEgAYNeuXcneZyXpGr1zLgHoB8wAVgETnHO/iMhgEWnvFWsOrBaR34CywHPe+guBH0QkHvgvMNQ5tyLEnyF07NqlJr93L0yfDlWq+K3IMMKWX3/9lf79+7Ny5UoqVKjAkCFDiIuLIz4+nu+++46VK1eetM+ePXto1qwZ8fHxXHzxxYwcOTLVYzvn+PHHH3n55ZeTfjTefPNNypUrx8qVK3niiSf46aefUt333nvvZfHixaxYsYI9e/bw7bffAtCtWzf69+9PfHw8Cxcu5Mwzz2Tq1KlMnz6dH3/8kfj4eB544IF0P/dPP/3El19+yezZsylYsCCTJk1i6dKlzJo1i/79+wMQHx/Piy++yLx584iPj+eVV16hePHiXHrppUl6xo4dy7XXXpstTwVBncE5Nw2YlmLdkwHLE4GTqmOcc98BtU9TY/Zw8KCmGl67ViP5mBi/FRlGcryIN1w455xziI09UVMwduxYPvzwQxISEti0aRMrV66keoo5kwsWLEjbtm0BqF+/Pt9//32qx+7UqVNSmcTIe8GCBTz88MMAxMTEUKNGjVT3nT17Ni+//DKHDh1i+/bt1K9fn0aNGrF9+3batWsH6KAjgFmzZnHLLbdQsGBBAEoFkbfq8ssvp2TJkoD+IA0cOJAFCxaQK1cu1q9fz/bt25kzZw5du3ZNOl7ia+/evXnjjTe4+uqrGTVqFB9//HG65wsF1sIIcOwY3HAD/O9/MG4ctGjhtyLDCHsKFy6ctLxmzRpef/11fvzxR0qUKEGPHj1S7eudL1++pOXcuXOnWW2RP3/+dMukxoEDB+jXrx9Lly6lQoUKPP7445kaVZwnTx6OHz8OcNL+gZ97zJgx7Nmzh6VLl5InTx4qVqx4yvM1a9aMfv36MXfuXPLmzcsFF1yQYW2ZIWe1nqSGc9qFctIkeP11ndzbMIwMsXfvXooWLUqxYsXYvHkzM2bMCPk5Lr30UiZMmADAihUrUq0aOnjwILly5aJ06dLs27ePL774AoCSJUtSpkwZpk6dCqh5HzhwgNatWzNy5EgOHjwIwM6dOwGoUqUKS5YsAWDixDT7jrBnzx7OPPNM8uTJw3fffcfGjRsBuOyyyxg/fnzS8RJfAXr06EH37t25+eabT+t6ZAQz+mee0T7yAwfC3Xf7rcYwIpJ69epRvXp1LrjgAnr27Mmll14a8nPcfffdbNy4kerVq/P0009TvXp1ihcvnqzMGWecwU033UT16tVp27YtDRs2TNr26aef8sorr1C7dm0aN27Mtm3buPrqq2nTpg2xsbHUqVOH1157DYAHH3yQ119/nXr16rFr1640Nd14440sXLiQWrVqMW7cOM477zxAq5YeeughmjZtSp06dXjwwQeT9unevTt79uyha9euobw8pyTd7pXZTbZ2r/zgA+jbF3r21Im9bUCUEWaEU/dKv0lISCAhIYECBQqwZs0aLr/8ctasWeNbF8fMMm7cOGbMmMGoUaMyfYyMdq+MrCsUSqZMgdtv1142I0aYyRtGmLN//35atmxJQkICzjmGDx8ecSZ/xx13MGvWrKSeN9lFZF2lULFwIXTtCvXra8rhDKT7NAzDH0qUKJFUbx6pvPvuu76cN+fV0a9apblrKlaEb77R6QANwzCimJxl9Bs3whVXQL58OkNUmTJ+KzIMw8hyck7Vze7dWh+/ezf8979QrZrfigzDMLKFnGH0hw5Bhw6wejVMmwZ16/qtyDAMI9uI/qqbY8egRw+YPx9Gj4ZWrfxWZBgRQ4sWLU4a/DRs2DDuSGemtSJe29emTZvo0qVLqmWaN29Oel2phw0bxoEDB5LeX3nllezevTsY6UYA0W30zsG998IXX8Crr2p+ecMwgqZbt26MGzcu2bpx48bRLcj/pbPOOuuUI0vTI6XRT5s2jRIlSmT6eNmNcy4plYKfRLfRv/ACvP02DBgAXlY5w4hU/MhS3KVLF7755pukSUbWrVvHpk2baNKkSVK/9nr16lGrVi0mT5580v7r1q2jZs2agKYnuP7667nwwgvp2LFjUtoB0P7liSmOn3rqKQDeeOMNNm3aRIsWLWjh5Z+qUqUK27dvB+DVV1+lZs2a1KxZMynF8bp167jwwgvp06cPNWrU4PLLL092nkSmTp1Kw4YNqVu3Lq1atWLLli2A9tW/+eabqVWrFrVr105KofDtt99Sr149YmJiaNmyJaD5+YcOHZp0zJo1a7Ju3TrWrVvH+eefT8+ePalZsybr169P9fMBLF68mEsuuYSYmBgaNGjAvn37aNq0abL0y40bNyY+Pv7UX1Q6RG8d/ahR8Nhj0L07vPii32oMIyIpVaoUDRo0YPr06XTo0IFx48Zx3XXXISIUKFCAr776imLFirF9+3YaNWpE+/bt05zT9N1336VQoUKsWrWK5cuXU69evaRtzz33HKVKleLYsWO0bNmS5cuXc8899/Dqq68yd+5cSqeYF2LJkiWMGjWKH374AeccDRs2pFmzZpQsWZI1a9YwduxYPvjgA6677jq++OILevTokWz/xo0bs2jRIkSEESNG8NJLL/HKK6/wzDPPULx4cVas0Gzqu3btYtu2bfTp04f58+dTtWrVZHlr0mLNmjWMHj2aRo0apfn5LrjgArp27cr48eO56KKL2Lt3LwULFuTWW2/lo48+YtiwYfz2228cOnSImNPMphudRv/NN9CnD7RuDSNHQg6b+caITvzKUpxYfZNo9B9++CGg1RKPPvoo8+fPJ1euXGzcuJEtW7ZQrly5VI8zf/587rnnHgBq165N7donMphPmDCB999/n4SEBDZv3szKlSuTbU/JggUL6NixY1ImyU6dOvH999/Tvn17qlatSp06dYDkaY4D2bBhA127dmXz5s0cOXKEqlWrApq2OLCqqmTJkkydOpWmTZsmlQkmlXHlypWTTD6tzycilC9fnosuugiAYsWKAXDttdfyzDPP8PLLLzNy5Eh69eqV7vnSI/oc8Icf4NprNZ/8F19on3nDMDJNhw4dmD17NkuXLuXAgQPUr18f0CRh27ZtY8mSJSxbtoyyZctmKiXwn3/+ydChQ5k9ezbLly/nqquuytRxEklMcQxppzm+++676devHytWrGD48OGnncoYkqczDkxlnNHPV6hQIVq3bs3kyZOZMGEC3bt3z7C2lESX0a9eDVddBeXLazfKokX9VmQYEU+RIkVo0aIFt9xyS7JG2MQUvXnz5mXu3Ln89ddfpzxO06ZN+eyzzwD4+eefWb58OaApjgsXLkzx4sXZsmUL06dPT9qnaNGi7Nu376RjNWnShEmTJnHgwAH+/fdfvvrqK5o0aRL0Z9qzZw8VKlQAYPTo0UnrW7duzdtvv530fteuX
TRq1Ij58+fz559/AslTGS9duhSApUuXJm1PSVqf7/zzz2fz5s0sXrwYgH379iX9KPXu3Zt77rmHiy66KGmSk9Mheox+82Yd9Zorl456LVvWb0WGETV069aN+Pj4ZEbfvXt34uLiqFWrFmPGjEl3Eo077riD/fv3c+GFF/Lkk08mPRnExMRQt25dLrjgAm644YZkKY779u1LmzZtkhpjE6lXrx69evWiQYMGNGzYkN69e1M3A+NjBg0axLXXXkv9+vWT1f8//vjj7Nq1i5o1axITE8PcuXMpU6YM77//Pp06dSImJiYpvXDnzp3ZuXMnNWrU4K233uI///lPqudK6/Ply5eP8ePHc/fddxMTE0Pr1q2TIv369etTrFixkOWsj540xbt3w403wlNPQWyqmToNI+KwNMU5k02bNtG8eXN+/fVXcqXSxpjRNMXRE9GXKAFTp5rJG4YR0YwZM4aGDRvy3HPPpWrymSE6e90YhmFEKD179qRnz54hPWb0RPSGEaWEW/Wq4S+ZuR/M6A0jjClQoAA7duwwszcANfkdO3ZQoECBDO1nVTeGEcZUrFiRDRs2sG3bNr+lGGFCgQIFqFixYob2MaM3jDAmb968SSMyDSOzWNWNYRhGlGNGbxiGEeWY0RuGYUQ5YTcyVkS2AadOmnFqSgPbQyQnlJiujGG6MobpyhjRqKuyc65MahvCzuhPFxGJS2sYsJ+YroxhujKG6coYOU2XVd0YhmFEOWb0hmEYUU40Gv37fgtIA9OVMUxXxjBdGSNH6Yq6OnrDMAwjOdEY0RuGYRgBmNEbhmFEORFj9CLSRkRWi8haERmYyvb8IjLe2/6DiFQJ2PaIt361iFyRzbruF5GVIrJcRGaLSOWAbcdEZJn3NyWbdfUSkW0B5+8dsO0mEVnj/d2UzbpeC9D0m4jsDtiWlddrpIhsFZGf09guIvKGp3u5iNQL2JaV1ys9Xd09PStEZKGIxARsW+etXyYimZi27bR0NReRPQHf15MB2055D2SxrgcDNP3s3VOlvG1Zeb3OFpG5nhf8IiL3plIm6+4x51zY/wG5gd+BakA+IB6onqLMncB73vL1wHhvubpXPj9Q1TtO7mzU1QIo5C3fkajLe7/fx+vVC3grlX1LAX94ryW95ZLZpStF+buBkVl9vbxjNwXqAT+nsf1KYDogQCPgh6y+XkHquiTxfEDbRF3e+3VAaZ+uV3Pg69O9B0KtK0XZdsCcbLpe5YF63nJR4LdU/iez7B6LlIi+AbDWOfeHc+4IMA7okKJMByBxOveJQEsREW/9OOfcYefcn8Ba73jZoss5N9c5d8B7uwjIWH7RLNJ1Cq4AvnPO7XTO7QK+A9r4pKsbMDZE5z4lzrn5wM5TFOkAjHHKIqCEiJQna69Xurqccwu980L23V/BXK+0OJ17M9S6svP+2uycW+ot7wNWARVSFMuyeyxSjL4CsD7g/QZOvkhJZZxzCcAe4Iwg981KXYHciv5iJ1JAROJEZJGIXBMiTRnR1dl7RJwoImdncN+s1IVXxVUVmBOwOquuVzCkpT0rr1dGSXl/OWCmiCwRkb4+6LlYROJFZLqI1PDWhcX1EpFCqFl+EbA6W66XaLVyXeCHFJuy7B6zfPTZhIj0AGKBZgGrKzvnNopINWCOiKxwzv2eTZKmAmOdc4dF5Db0aeiybDp3MFwPTHTOHQtY5+f1CmtEpAVq9I0DVjf2rteZwHci8qsX8WYHS9Hva7+IXAlMAs7LpnMHQzvgf865wOg/y6+XiBRBf1zuc87tDeWxT0WkRPQbgbMD3lf01qVaRkTyAMWBHUHum5W6EJFWwGNAe+fc4cT1zrmN3usfwDz0Vz5bdDnndgRoGQHUD3bfrNQVwPWkeKzOwusVDGlpz8rrFRQiUhv9Djs4ZB1ESAAAAcVJREFU53Ykrg+4XluBrwhdlWW6OOf2Ouf2e8vTgLwiUpowuF4ep7q/suR6iUhe1OQ/dc59mUqRrLvHsqLhIdR/6JPHH+ijfGIDTo0UZe4ieWPsBG+5BskbY/8gdI2xweiqizY+nZdifUkgv7dcGlhDiBqlgtRVPmC5I7DInWj4+dPTV9JbLpVdurxyF6ANY5Id1yvgHFVIu3HxKpI3lP2Y1dcrSF2V0HanS1KsLwwUDVheCLTJRl3lEr8/1DD/9q5dUPdAVunythdH6/ELZ9f18j77GGDYKcpk2T0Wsoub1X9oi/RvqGk+5q0bjEbJAAWAz72b/kegWsC+j3n7rQbaZrOuWcAWYJn3N8VbfwmwwrvRVwC3ZrOuF4BfvPPPBS4I2PcW7zquBW7OTl3e+0HAkBT7ZfX1GgtsBo6idaC3ArcDt3vbBXjb070CiM2m65WerhHAroD7K85bX827VvHe9/xYNuvqF3B/LSLghyi1eyC7dHlleqEdNAL3y+rr1RhtA1ge8F1dmV33mKVAMAzDiHIipY7eMAzDyCRm9IZhGFGOGb1hGEaUY0ZvGIYR5ZjRG4ZhRDlm9IZhGFGOGb1hGEaU8/8aIgTzrKca2wAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "%matplotlib inline\n", + "import matplotlib.pyplot as plt\n", + "acc = history.history['acc']\n", + "val_acc = history.history['val_acc']\n", + "loss = history.history['loss']\n", + "val_loss = history.history['val_loss']\n", + "\n", + "epochs = range(len(acc))\n", + "\n", + "plt.plot(epochs, acc, 'r', label='Training accuracy')\n", + "plt.plot(epochs, val_acc, 'b', label='Validation accuracy')\n", + "plt.title('Training and validation accuracy')\n", + "plt.legend(loc=0)\n", + "plt.figure()\n", + "\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Submission Instructions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Now click the 'Submit Assignment' button above." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# When you're done or would like to take a break, please run the two cells below to save your work and close the Notebook. This will free up resources for your fellow learners. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%javascript\n", + "\n", + "IPython.notebook.save_checkpoint();" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%javascript\n", + "IPython.notebook.session.delete();\n", + "window.onbeforeunload = null\n", + "setTimeout(function() { window.close(); }, 1000);" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "name": "Exercise 7 - Question.ipynb", + "provenance": [] + }, + "coursera": { + "course_slug": "convolutional-neural-networks-tensorflow", + "graded_item_id": "csg1x", + "launcher_item_id": "GpKYz" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/Week 4/Exercise_4_Multi_class_classifier_Question-FINAL.ipynb b/Course 2: Convolutional Neural Networks in Tensorflow/Week 4/Exercise_4_Multi_class_classifier_Question-FINAL.ipynb new file mode 100644 index 0000000..82d89bb --- /dev/null +++ b/Course 2: Convolutional Neural Networks in Tensorflow/Week 4/Exercise_4_Multi_class_classifier_Question-FINAL.ipynb @@ -0,0 +1,349 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "wYtuKeK0dImp" + }, + "outputs": [], + "source": [ + "# ATTENTION: Please do not alter any of the provided code in the exercise. Only add your own code where indicated\n", + "# ATTENTION: Please do not add or remove any cells in the exercise. 
The grader will check specific cells based on the cell position.\n", + "# ATTENTION: Please use the provided epoch values when training.\n", + "\n", + "import csv\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "from tensorflow.keras.models import Sequential\n", + "from tensorflow.keras.layers import Conv2D, Dropout, MaxPooling2D, Flatten, Dense\n", + "from tensorflow.keras.optimizers import RMSprop\n", + "from tensorflow.keras.callbacks import Callback\n", + "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", + "from os import getcwd" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "4kxw-_rmcnVu" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(27455, 28, 28)\n", + "(27455,)\n", + "(7172, 28, 28)\n", + "(7172,)\n" + ] + } + ], + "source": [ + "def get_data(filename):\n", + " # You will need to write code that will read the file passed\n", + " # into this function. The first line contains the column headers\n", + " # so you should ignore it\n", + " # Each successive line contians 785 comma separated values between 0 and 255\n", + " # The first value is the label\n", + " # The rest are the pixel values for that picture\n", + " # The function will return 2 np.array types. One with all the labels\n", + " # One with all the images\n", + " #\n", + " # Tips: \n", + " # If you read a full line (as 'row') then row[0] has the label\n", + " # and row[1:785] has the 784 pixel values\n", + " # Take a look at np.array_split to turn the 784 pixels into 28x28\n", + " # You are reading in strings, but need the values to be floats\n", + " # Check out np.array().astype for a conversion\n", + " with open(filename) as training_file:\n", + " # Your code starts here\n", + " all_lines = training_file.readlines()[1:] #remove header\n", + " length = len(all_lines)\n", + " labels = np.zeros(length)\n", + " images = np.zeros((length, 28, 28))\n", + " for idx, line in enumerate(all_lines):\n", + " line = line.strip().split(',')\n", + " if not line:\n", + " continue\n", + " #add the label\n", + " labels[idx] = int(line[0])\n", + " #add the image\n", + " image = np.asarray(line[1:], dtype=np.float32)\n", + " image = np.array_split(image, 28)\n", + " images[idx, :, :] = image\n", + " # Your code ends here\n", + " return images, labels\n", + "\n", + "path_sign_mnist_train = f\"{getcwd()}/../tmp2/sign_mnist_train.csv\"\n", + "path_sign_mnist_test = f\"{getcwd()}/../tmp2/sign_mnist_test.csv\"\n", + "training_images, training_labels = get_data(path_sign_mnist_train)\n", + "testing_images, testing_labels = get_data(path_sign_mnist_test)\n", + "\n", + "# Keep these\n", + "print(training_images.shape)\n", + "print(training_labels.shape)\n", + "print(testing_images.shape)\n", + "print(testing_labels.shape)\n", + "\n", + "# Their output should be:\n", + "# (27455, 28, 28)\n", + "# (27455,)\n", + "# (7172, 28, 28)\n", + "# (7172,)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "awoqRpyZdQkD" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(27455, 28, 28, 1)\n", + "(7172, 28, 28, 1)\n" + ] + } + ], + "source": [ + "# In this section you will have to add another dimension to the data\n", + "# So, for example, if your array is (10000, 28, 28)\n", + "# You will need to make it (10000, 28, 28, 1)\n", + "# Hint: np.expand_dims\n", + "\n", + "training_images = 
np.expand_dims(training_images, axis=3) # Your Code Here\n", + "testing_images = np.expand_dims(testing_images, axis=3) # Your Code Here\n", + "\n", + "# Create an ImageDataGenerator and do Image Augmentation\n", + "train_datagen = ImageDataGenerator(\n", + " rescale = 1./255,\n", + " rotation_range=40,\n", + " width_shift_range=0.2,\n", + " height_shift_range=0.2,\n", + " shear_range=0.2,\n", + " zoom_range=0.2,\n", + " horizontal_flip=True,\n", + " fill_mode='nearest') # Your Code Here\n", + "\n", + "validation_datagen = ImageDataGenerator(rescale = 1./255)# Your Code Here)\n", + " \n", + "# Keep These\n", + "print(training_images.shape)\n", + "print(testing_images.shape)\n", + " \n", + "# Their output should be:\n", + "# (27455, 28, 28, 1)\n", + "# (7172, 28, 28, 1)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "Rmb7S32cgRqS" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/2\n", + "858/858 - 65s - loss: 2.5862 - acc: 0.2062 - val_loss: 1.3600 - val_acc: 0.5693\n", + "Epoch 2/2\n", + "858/858 - 61s - loss: 1.7896 - acc: 0.4302 - val_loss: 0.9229 - val_acc: 0.6790\n" + ] + }, + { + "data": { + "text/plain": [ + "[129.98964610708734, 0.54099274]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Define the model\n", + "# Use no more than 2 Conv2D and 2 MaxPooling2D\n", + "model = Sequential([\n", + " # Your Code Here\n", + " Conv2D(64, (3, 3), activation='relu', input_shape=(28, 28, 1)),\n", + " MaxPooling2D(2, 2),\n", + " Conv2D(128, (3, 3), activation='relu'),\n", + " MaxPooling2D(2, 2),\n", + " Flatten(),\n", + " Dropout(0.5),\n", + " Dense(512, activation='relu'),\n", + " Dense(25, activation='softmax')\n", + "])\n", + "\n", + "# Compile Model. 
\n", + "model.compile(optimizer = RMSprop(lr=0.001), \n", + " loss = 'sparse_categorical_crossentropy', \n", + " metrics = ['acc']) # Your Code Here)\n", + "\n", + "# Train the Model\n", + "train_generator = train_datagen.flow(training_images, training_labels)\n", + "validation_generator = validation_datagen.flow(testing_images, testing_labels)\n", + "\n", + "history = model.fit_generator(train_generator,\n", + " epochs=2,\n", + " verbose=2,\n", + " validation_data=validation_generator)# Your Code Here (set 'epochs' = 2))\n", + "\n", + "model.evaluate(testing_images, testing_labels, verbose=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": {}, + "colab_type": "code", + "id": "_Q3Zpr46dsij" + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deXzU1b3/8dcHwir7VpBdhcguEIEWEBChqECuK+CCqIjSorXWtv6Uq6hXa6/WopXLLUWs2ipytRpwwWrVBy5VQBRUEIIYJey7QEAJnN8fZyaZCTPJJEwymcn7+XjkwSxnvnO+k/DON5/vOedrzjlERCT5VUt0B0REJD4U6CIiKUKBLiKSIhToIiIpQoEuIpIiFOgiIilCgZ7CzKy6mR0ws3bxbJtIZnaamcV9rK2ZnWNmOSH315rZ4FjaluG95prZ7WV9vUg0aYnugBQyswMhd+sC3wNHA/evd879vTTbc84dBerFu21V4JxLj8d2zGwycIVzbmjItifHY9siRSnQKxHnXEGgBo4AJzvn3ozW3szSnHP5FdE3kZLo5zHxVHJJImb2X2b2nJk9a2b7gSvM7Mdm9qGZ7TWzLWb2qJnVCLRPMzNnZh0C9/8WeP41M9tvZv82s46lbRt4/lwzW2dm+8zsT2b2vplNitLvWPp4vZmtN7M9ZvZoyGurm9kfzWyXmW0ARhXz+dxhZvOLPDbLzB4O3J5sZmsC+/NV4Og52rZyzWxo4HZdM3s60LcvgL5F2k43sw2B7X5hZmMDj/cAHgMGB8pZO0M+2xkhr78hsO+7zOwlM2sVy2dTms852B8ze9PMdpvZVjP7Tcj7/GfgM/nOzJab2cmRyltm9l7w+xz4PJcE3mc3MN3MOpnZ24H32Bn43BqGvL59YB93BJ5/xMxqB/rcJaRdKzPLM7Om0fZXInDO6asSfgE5wDlFHvsv4AdgDP6XcR3gTKA//q+tU4B1wLRA+zTAAR0C9/8G7AQygBrAc8DfytC2BbAfyAw8dwtwBJgUZV9i6WMW0BDoAOwO7jswDfgCaAM0BZb4H9uI73MKcAA4KWTb24GMwP0xgTYGnA0cAnoGnjsHyAnZVi4wNHD7IeAdoDHQHlhdpO2lQKvA9+SyQB9+FHhuMvBOkX7+DZgRuD0y0MczgNrA/wBvxfLZlPJzbghsA34B1AIaAP0Cz/0/YCXQKbAPZwBNgNOKftbAe8Hvc2Df8oGpQHX8z2NnYDhQM/Bz8j7wUMj+fB74PE8KtB8YeG4OcF/I+/wKeDHR/w+T7SvhHdBXlG9M9EB/q4TX3Qr8X+B2pJD+35C2Y4HPy9D2GuDdkOcM2EKUQI+xjwNCnv8HcGvg9hJ86Sn43HlFQ6bItj8ELgvcPhdYW0zbl4GfB24XF+jfhn4vgJ+Fto2w3c+B8wO3Swr0J4H7Q55rgD9v0qakz6aUn/OVwLIo7b4K9rfI47EE+oYS+nBx8H2BwcBWoHqEdgOBrwEL3P8UuDDe/69S/Usll+SzMfSOmZ1uZq8E/oT+DrgHaFbM67eG3M6j+BOh0dqeHNoP5/8H5kbbSIx9jOm9gG+K6S/AM8CEwO3LAveD/RhtZh8FygF78UfHxX1WQa2K64OZTTKzlYGywV7g9Bi3C37/CrbnnPsO2AO0DmkT0/eshM+5LT64IynuuZIU/XlsaWYLzGxToA9/LdKHHOdPwIdxzr2PP9ofZGbdgXbAK2XsU5WlQE8+RYfs/Rl/RHiac64BcCf+iLk8bcEfQQJgZkZ4ABV1In3cgg+CoJKGVS4AzjGz1viS0DOBPtYBngd+hy+HNAL+GWM/tkbrg5mdAszGlx2aBrb7Zch2SxpiuRlfxglurz6+tLMphn4VVdznvBE4Ncrroj13MNCnuiGPtSzSpuj+/R4/OqtHoA+TivShvZlVj9KPp4Ar8H9NLHDOfR+lnUShQE9+9YF9wMHASaXrK+A9Xwb6mNkYM0vD12Wbl1MfFwA3m1nrwAmy3xbX2Dm3FV8W+Cu+3JIdeKoWvq67AzhqZqPxtd5Y+3C7mTUyP05/Wshz9fChtgP/u+06/BF60DagTejJySKeBa41s55mVgv/C+dd51zUv3iKUdznvBBoZ2bTzKyWmTUws36B5+YC/2Vmp5p3hpk1wf8i24o/+V7dzKYQ8sunmD4cBPaZWVt82Sfo38Au4H7zJ5rrmNnAkOefxpdoLsOHu5SSAj35/Qq4Cn+S8s/4k5flyjm3DRgHPIz/D3oq8An+yCzefZwN/Av4DFiGP8ouyTP4mnhBucU5txf4JfAi/sTixfhfTLG4C/+XQg7wGiFh45xbBfwJWBpokw58FPLaN4BsYJuZhZZOgq9fjC+NvBh4fTvg8hj7VVTUz9k5tw8YAVyE/yWzDhgSePpB4CX85/wd/gRl7UAp7TrgdvwJ8tOK7FskdwH98L9YFgIvhPQhHxgNdMEfrX+L/z4En8/Bf5+/d859UMp9FwpPQIiUWeBP6M3Axc65dxPdH0leZvYU/kTrjET3JRlpYpGUiZmNwo8oOYQf9nYEf5QqUiaB8xGZQI9E9yVZqeQiZTUI2ICvHf8UuEAnsaSszOx3+LHw9zvnvk10f5JVTCWXwNHYI/jJA3Odcw8Uef6PwLDA3bpAi8DZfhERqSAlBnqgProOf0IlF39iaoJzbnWU9jcCvZ1z18S5ryIiUoxYauj9gPXOuQ0A5tfKyMRPf45kAv5Md7GaNWvmOnToEGM3RUQE4OOPP97
[rest of base64-encoded PNG omitted; the rendered output is the 'Training and validation accuracy' figure]\n",
+      "text/plain": [
+       "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAEICAYAAABRSj9aAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nO3deZxU9ZX//9cBGloWQTaVtV2BZhU6oCgC0SAg0mh4GBxx1InB4RfN4jJhiBN9GP3GSYwxauISg45RUUelmwSXGDdAR2WJIosIQYwNyKYiiCIN5/fH5xZV9FpNV3d1Vb+fj8d9WHXvp259brec++nPPfdcc3dERCR7NUl3B0REpG4p0IuIZDkFehGRLKdALyKS5RToRUSynAK9iEiWU6CXGjOzpma2y8x6pLJtOpnZ8WaW8lxjMzvTzNYnvF9tZiOSaXsI33W/mc081M9Xsd+bzOzBVO9X6k+zdHdA6p6Z7Up42xLYA+yL3l/u7o/UZH/uvg9oneq2jYG790rFfszsMmCqu49K2Pdlqdi3ZB8F+kbA3Q8E2mjEeJm7/62y9mbWzN1L66NvIlL3NHUjsT/NHzez2Wa2E5hqZqeY2Rtm9pmZbTKzO8wsJ2rfzMzczPKi9w9H2581s51m9n9mdkxN20bbx5nZ+2a2w8zuNLPXzOySSvqdTB8vN7O1Zvapmd2R8NmmZvYbM9tuZuuAsVX8fH5qZo+VWfc7M7sten2Zma2Kjucf0Wi7sn2VmNmo6HVLM/tT1LcVwJAyba8zs3XRfleY2cRofX/gLmBENC22LeFne0PC5/89OvbtZlZkZkcn87OpjpmdG/XnMzN7ycx6JWybaWYbzexzM3sv4VhPNrOl0frNZvarZL9PUsDdtTSiBVgPnFlm3U3A18A5hJP/YcA3gGGEv/qOBd4HrojaNwMcyIvePwxsAwqAHOBx4OFDaNsZ2AkURtuuAvYCl1RyLMn0sRhoC+QBn8SOHbgCWAF0AzoA88M/hwq/51hgF9AqYd9bgILo/TlRGwO+CXwJDIi2nQmsT9hXCTAqen0r8ApwBNATWFmm7fnA0dHv5F+iPhwZbbsMeKVMPx8Gbohej4n6OAjIBX4PvJTMz6aC478JeDB63Sfqxzej39FMYHX0ui/wIXBU1PYY4Njo9SLgguh1G2BYuv8tNKZFI3qJWejuf3b3/e7+pbsvcvc33b3U3dcB9wEjq/j8k+6+2N33Ao8QAkxN204A3nb34mjbbwgnhQol2cdfuPsOd19PCKqx7zof+I27l7j7duCWKr5nHbCccAIC+Bbwqbsvjrb/2d3XefAS8CJQ4QXXMs4HbnL3T939Q8IoPfF7n3D3TdHv5FHCSbogif0CXAjc7+5vu/tXwAxgpJl1S2hT2c+mKlOAue7+UvQ7uoVwshgGlBJOKn2j6b8Pop8dhBP2CWbWwd13uvubSR6HpIACvcR8lPjGzHqb2Twz+9jMPgduBDpW8fmPE17vpuoLsJW17ZLYD3d3wgi4Qkn2ManvIoxEq/IocEH0+l+i97F+TDCzN83sEzP7jDCarupnFXN0VX0ws0vM7J1oiuQzoHeS+4VwfAf25+6fA58CXRPa1OR3Vtl+9xN+R13dfTVwNeH3sCWaCjwqanopkA+sNrO3zGx8kschKaBALzFlUwvvJYxij3f3w4GfEaYm6tImwlQKAGZmHByYyqpNHzcB3RPeV5f++QRwppl1JYzsH436eBjwJPALwrRKO+CvSfbj48r6YGbHAncD04EO0X7fS9hvdamgGwnTQbH9tSFMEW1Iol812W8Twu9sA4C7P+zupxKmbZoSfi64+2p3n0KYnvs18JSZ5dayL5IkBXqpTBtgB/CFmfUBLq+H7/wLMNjMzjGzZsAPgU511McngB+ZWVcz6wD8pKrG7v4xsBB4EFjt7muiTS2A5sBWYJ+ZTQDOqEEfZppZOwv3GVyRsK01IZhvJZzzvkcY0cdsBrrFLj5XYDbwXTMbYGYtCAF3gbtX+hdSDfo80cxGRd99LeG6yptm1sfMRkff92W07CccwEVm1jH6C2BHdGz7a9kXSZICvVTmauBiwj/iewkXTeuUu28GvgPcBmwHjgP+Tsj7T3Uf7ybMpb9LuFD4ZBKfeZRwcfXAtI27fwb8GJhDuKA5mXDCSsb1hL8s1gPPAg8l7HcZcCfwVtSmF5A4r/0CsAbYbGaJUzCxzz9HmEKZE32+B2HevlbcfQXhZ3434SQ0FpgYzde3AH5JuK7yMeEviJ9GHx0PrLKQ1XUr8B13/7q2/ZHkWJgGFWl4zKwpYapgsrsvSHd/RDKVRvTSoJjZ2GgqowXwX4RsjbfS3C2RjKZALw3NacA6wrTAWcC57l7Z1I2IJEFTNyIiWU4jehGRLNcgi5p17NjR8/Ly0t0NEZGMsWTJkm3uXmE6coMM9Hl5eSxevDjd3RARyRhmVund3Zq6ERHJcgr0IiJZToFeRCTLNcg5ehGpe3v37qWkpISvvvoq3V2RGsjNzaVbt27k5FRW5qg8BXqRRqqkpIQ2bdqQl5dHKBQqDZ27s337dkpKSjjmmGOq/0BEUzcijdRXX31Fhw4dFOQziJnRoUOHGv8VpkAv0ogpyGeeQ/mdZVeg//nP4amnYNeudPdERKTByJ5Av3s33HknTJ4MHTvChAlw//2weXO6eyYiFdi+fTuDBg1i0KBBHHXUUXTt2vXA+6+/Tq5U/aWXXsrq1aurbPO73/2ORx55JBVd5rTTTuPtt99Oyb7qU/ZcjG3ZEjZuhIULobgYiopg3jwwg1NOgUmToLAQTjwx3T0VEaBDhw4HguYNN9xA69atueaaaw5q4+64O02aVDwmfeCBB6r9nu9///u172yGy54RPUCzZjBqFPzmN7BuHbz9NtxwA3z5JfzHf0CvXpCfDzNnwptvwn49yUykoVm7di35+flceOGF9O3bl02bNjFt2jQKCgro27cvN95444G2sRF2aWkp7dq1Y8aMGQwcOJBTTjmFLVu2AHDddddx++23H2g/Y8YMhg4dSq9evXj99dcB+OKLL/j2t79Nfn4+kydPpqCgIOmR+5dffsnFF19M//79GTx4MPPnzwfg3Xff5Rvf+AaDBg1iwIABrFu3jp07dzJu3DgGDhxIv379ePLJZB5sVnvZM6IvywwGDgzLz34GH34Ic+eGkf4vfwm/+AUcfTRMnBhG+6NHQ4sW6e61SHr86EdhYJRKgwZBFGBr6r333uOhhx6ioKAAgFtuuYX27dtTWlrK6NGjmTx5Mvn5+Qd9ZseOHYwcOZJbbrmFq666ilmzZjFjxoxy+3Z33nrrLebOncuNN97Ic889x5133slRRx3FU089xTvvvMPgwYOT7usdd9xBixYtePfdd1mxYgXjx49nzZo1/P73v+eaa67h
[rest of base64-encoded PNG omitted; the rendered output is the 'Training and validation loss' figure]\n",
+      "text/plain": [
+       "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# Plot the chart for accuracy and loss on both training and validation\n", + "%matplotlib inline\n", + "import matplotlib.pyplot as plt\n", + "acc = history.history['acc'] # Your Code Here # Your Code Here\n", + "val_acc = history.history['val_acc'] # Your Code Here\n", + "loss = history.history['loss'] # Your Code Here\n", + "val_loss = history.history['val_loss'] # Your Code Here\n", + "\n", + "epochs = range(len(acc))\n", + "\n", + "plt.plot(epochs, acc, 'r', label='Training accuracy')\n", + "plt.plot(epochs, val_acc, 'b', label='Validation accuracy')\n", + "plt.title('Training and validation accuracy')\n", + "plt.legend()\n", + "plt.figure()\n", + "\n", + "plt.plot(epochs, loss, 'r', label='Training Loss')\n", + "plt.plot(epochs, val_loss, 'b', label='Validation Loss')\n", + "plt.title('Training and validation loss')\n", + "plt.legend()\n", + "\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Submission Instructions" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Now click the 'Submit Assignment' button above." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# When you're done or would like to take a break, please run the two cells below to save your work and close the Notebook. This will free up resources for your fellow learners. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%javascript\n", + "\n", + "IPython.notebook.save_checkpoint();" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%javascript\n", + "IPython.notebook.session.delete();\n", + "window.onbeforeunload = null\n", + "setTimeout(function() { window.close(); }, 1000);" + ] + } + ], + "metadata": { + "colab": { + "name": "Exercise 8 - Question.ipynb", + "provenance": [] + }, + "coursera": { + "course_slug": "convolutional-neural-networks-tensorflow", + "graded_item_id": "8mIh8", + "launcher_item_id": "gg95t" + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.8" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1a.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1a.png new file mode 100644 index 0000000..dfebc15 Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1a.png differ diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1b.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1b.png new file mode 100644 index 0000000..c8383d3 Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1b.png differ diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1c.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1c.png new file mode 100644 index 0000000..f5173d7 Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w2_quizz1c.png differ 
diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1a.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1a.png new file mode 100644 index 0000000..0c31c5b Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1a.png differ diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1b.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1b.png new file mode 100644 index 0000000..7090bfe Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1b.png differ diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1c.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1c.png new file mode 100644 index 0000000..8b3f416 Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w3_quizz1c.png differ diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1a.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1a.png new file mode 100644 index 0000000..6ac19a5 Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1a.png differ diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1b.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1b.png new file mode 100644 index 0000000..f8d6b31 Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1b.png differ diff --git a/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1c.png b/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1c.png new file mode 100644 index 0000000..d0861fb Binary files /dev/null and b/Course 2: Convolutional Neural Networks in Tensorflow/img/w4_quizz1c.png differ
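For readers skimming the Week 4 notebook diff above, the graded cells spread the sign-language-MNIST pipeline (CSV parsing, channel expansion, augmentation, a two-conv CNN) across several fragments. The sketch below is a minimal standalone rendering of the same flow, not the graded notebook code: the file paths, the batch size, the use of `np.loadtxt` in place of the notebook's manual line parsing, and calling `model.fit` with a generator (rather than the older `fit_generator`) are all assumptions for a recent TensorFlow 2.x setup.

```python
import numpy as np
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator

def load_sign_mnist(csv_path):
    # Each row holds a label followed by 784 pixel values; the first row is a header.
    data = np.loadtxt(csv_path, delimiter=',', skiprows=1, dtype=np.float32)
    labels = data[:, 0]
    images = data[:, 1:].reshape(-1, 28, 28)
    return images, labels

# Placeholder paths; the notebook reads from ../tmp2/ instead.
train_x, train_y = load_sign_mnist('sign_mnist_train.csv')
test_x, test_y = load_sign_mnist('sign_mnist_test.csv')

# Add a channel dimension: (N, 28, 28) -> (N, 28, 28, 1), as the Conv2D layers expect.
train_x = np.expand_dims(train_x, axis=3)
test_x = np.expand_dims(test_x, axis=3)

# Augment only the training data; the validation data is just rescaled.
train_gen = ImageDataGenerator(rescale=1./255, rotation_range=40,
                               width_shift_range=0.2, height_shift_range=0.2,
                               shear_range=0.2, zoom_range=0.2,
                               horizontal_flip=True, fill_mode='nearest')
val_gen = ImageDataGenerator(rescale=1./255)

model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=(28, 28, 1)),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(512, activation='relu'),
    tf.keras.layers.Dense(25, activation='softmax'),  # labels run 0-24, so 25 output units
])
model.compile(optimizer='rmsprop',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

history = model.fit(train_gen.flow(train_x, train_y, batch_size=32),
                    validation_data=val_gen.flow(test_x, test_y, batch_size=32),
                    epochs=2, verbose=2)
```

Two epochs matches the grader's epoch budget; because augmentation is random, the numbers will not reproduce the logged run (roughly 0.43 training / 0.68 validation accuracy after epoch 2) exactly.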
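Both notebooks end with the same History-plotting boilerplate, and the Week 2 version in this diff stops after the accuracy figure even though it already computes `loss` and `val_loss`. A small helper along the lines below covers both cases; it is a sketch, and the `'acc'`/`'val_acc'` keys assume the older Keras metric names these notebooks use (newer TensorFlow releases report `'accuracy'`/`'val_accuracy'` instead).

```python
import matplotlib.pyplot as plt

def plot_history(history):
    # Pull the curves recorded by model.fit / model.fit_generator.
    acc = history.history['acc']
    val_acc = history.history['val_acc']
    loss = history.history['loss']
    val_loss = history.history['val_loss']
    epochs = range(len(acc))

    plt.plot(epochs, acc, 'r', label='Training accuracy')
    plt.plot(epochs, val_acc, 'b', label='Validation accuracy')
    plt.title('Training and validation accuracy')
    plt.legend()

    plt.figure()
    plt.plot(epochs, loss, 'r', label='Training loss')
    plt.plot(epochs, val_loss, 'b', label='Validation loss')
    plt.title('Training and validation loss')
    plt.legend()

    plt.show()
```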