From 70e10b54835c83e0f9277e9d5f86d0809e68dc91 Mon Sep 17 00:00:00 2001 From: Kaustubh Dixit <40864637+KD-3@users.noreply.github.com> Date: Wed, 24 Jun 2020 02:37:08 +0530 Subject: [PATCH] Trained the model. --- BTP_Final_InceptionV3_Model.ipynb | 1020 ++++++++++++++++++++++++++++- 1 file changed, 1005 insertions(+), 15 deletions(-) diff --git a/BTP_Final_InceptionV3_Model.ipynb b/BTP_Final_InceptionV3_Model.ipynb index e1d1ed0..30c7976 100644 --- a/BTP_Final_InceptionV3_Model.ipynb +++ b/BTP_Final_InceptionV3_Model.ipynb @@ -5,12 +5,13 @@ "colab": { "name": "BTP_Final_InceptionV3_Model", "provenance": [], - "authorship_tag": "ABX9TyOTeul3wlJvUhUlX0OowbKm" + "authorship_tag": "ABX9TyNJlqWprmqdsMC3xKLQ2nmu" }, "kernelspec": { "name": "python3", "display_name": "Python 3" - } + }, + "accelerator": "GPU" }, "cells": [ { @@ -18,33 +19,66 @@ "metadata": { "id": "s8nBLrDw7urc", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 124 + }, + "outputId": "5a76ee4d-1000-4de4-8b1b-d66de70cf7a6" }, "source": [ "from google.colab import drive\n", "drive.mount('/gdrive')" ], - "execution_count": null, - "outputs": [] + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3aietf%3awg%3aoauth%3a2.0%3aoob&response_type=code&scope=email%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdocs.test%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive%20https%3a%2f%2fwww.googleapis.com%2fauth%2fdrive.photos.readonly%20https%3a%2f%2fwww.googleapis.com%2fauth%2fpeopleapi.readonly\n", + "\n", + "Enter your authorization code:\n", + "··········\n", + "Mounted at /gdrive\n" + ], + "name": "stdout" + } + ] }, { "cell_type": "code", "metadata": { "id": "Pj4RIJO574K_", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 357 + }, + "outputId": "7ac9d44e-1e3e-4a9e-cc07-d3bd4eed5530" }, "source": [ "import os\n", "import zipfile\n", "\n", - "local_zip = '/content/drive/My Drive/NEU-DET_new.zip'\n", + "local_zip = '/content/NEU-DET'\n", "zip_ref = zipfile.ZipFile(local_zip, 'r')\n", "zip_ref.extractall('./')\n", "zip_ref.close()" ], - "execution_count": null, - "outputs": [] + "execution_count": 5, + "outputs": [ + { + "output_type": "error", + "ename": "IsADirectoryError", + "evalue": "ignored", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mIsADirectoryError\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mlocal_zip\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m'/content/NEU-DET'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m \u001b[0mzip_ref\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mzipfile\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mZipFile\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlocal_zip\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'r'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 6\u001b[0m \u001b[0mzip_ref\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mextractall\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'./'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 
7\u001b[0m \u001b[0mzip_ref\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/lib/python3.6/zipfile.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, file, mode, compression, allowZip64)\u001b[0m\n\u001b[1;32m 1111\u001b[0m \u001b[0;32mwhile\u001b[0m \u001b[0;32mTrue\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1112\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1113\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mio\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mopen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfile\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfilemode\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1114\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mOSError\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1115\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfilemode\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mmodeDict\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mIsADirectoryError\u001b[0m: [Errno 21] Is a directory: '/content/NEU-DET'" + ] + } + ] }, { "cell_type": "code", @@ -61,7 +95,7 @@ "train_rolledin_scale_dir = os.path.join('/content/NEU-DET/train/images/rolled-in_scale/')\n", "train_scratches_dir = os.path.join('/content/NEU-DET/train/images/scratches/')" ], - "execution_count": null, + "execution_count": 6, "outputs": [] }, { @@ -79,7 +113,7 @@ "train_rolledin_scale_names = os.listdir(train_rolledin_scale_dir)\n", "train_scratches_names = os.listdir(train_scratches_dir)" ], - "execution_count": null, + "execution_count": 7, "outputs": [] }, { @@ -87,7 +121,11 @@ "metadata": { "id": "LwR8IU2v8Jo1", "colab_type": "code", - "colab": {} + "colab": { + "base_uri": "https://localhost:8080/", + "height": 121 + }, + "outputId": "6d785d96-d289-47fe-c3d6-5d2bd3204707" }, "source": [ "print('total training crazing images:', len(train_crazing_names))\n", @@ -97,8 +135,21 @@ "print('total training crazing images:', len(train_rolledin_scale_names))\n", "print('total training crazing images:', len(train_scratches_names))" ], - "execution_count": null, - "outputs": [] + "execution_count": 8, + "outputs": [ + { + "output_type": "stream", + "text": [ + "total training crazing images: 240\n", + "total training crazing images: 240\n", + "total training crazing images: 240\n", + "total training crazing images: 240\n", + "total training crazing images: 240\n", + "total training crazing images: 240\n" + ], + "name": "stdout" + } + ] }, { "cell_type": "code", @@ -110,7 +161,7 @@ "source": [ "import tensorflow as tf" ], - "execution_count": null, + "execution_count": 9, "outputs": [] }, { @@ -125,6 +176,945 @@ ], "execution_count": null, "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "KNb-XmhtXnKK", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 35 + }, + "outputId": "a76a8e3c-ac71-4825-ba77-54d8e05f0648" + }, + "source": [ + "from keras.applications.inception_v3 import InceptionV3\n", + "from keras.models import Model\n", + "from keras.layers import Dense, GlobalAveragePooling2D" + ], + "execution_count": 10, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Using 
TensorFlow backend.\n" + ], + "name": "stderr" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "pO136t_iX25f", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 72 + }, + "outputId": "188c5f8f-85c3-4afe-adf5-3abfc45209d6" + }, + "source": [ + "input_shape = (224, 224, 3)\n", + "# load InceptionV3 from Keras\n", + "InceptionV3_model = InceptionV3(include_top=False, input_shape=input_shape)" + ], + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.5/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5\n", + "87916544/87910968 [==============================] - 8s 0us/step\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "3b-oZ48qYEeI", + "colab_type": "code", + "colab": {} + }, + "source": [ + "num_classes = 6\n", + "# add custom Layers\n", + "x = InceptionV3_model.output\n", + "x = GlobalAveragePooling2D()(x)\n", + "x = Dense(512, activation=\"relu\")(x)\n", + "Custom_Output = Dense(num_classes, activation='softmax')(x)" + ], + "execution_count": 12, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "baTFJN-3Y_BW", + "colab_type": "code", + "colab": {} + }, + "source": [ + "model = Model(inputs = InceptionV3_model.input, outputs = Custom_Output)" + ], + "execution_count": 13, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "pgml8PvSZFTB", + "colab_type": "code", + "colab": {} + }, + "source": [ + "# compile the model\n", + "model.compile(loss='categorical_crossentropy',\n", + " optimizer='adam',\n", + " metrics=['accuracy'])" + ], + "execution_count": 14, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "gH-9T1PfZGh_", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "6289bad1-0933-4dce-f1a0-bace5550ae1d" + }, + "source": [ + "model.summary()" + ], + "execution_count": 15, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Model: \"model_1\"\n", + "__________________________________________________________________________________________________\n", + "Layer (type) Output Shape Param # Connected to \n", + "==================================================================================================\n", + "input_1 (InputLayer) (None, 224, 224, 3) 0 \n", + "__________________________________________________________________________________________________\n", + "conv2d_1 (Conv2D) (None, 111, 111, 32) 864 input_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_1 (BatchNor (None, 111, 111, 32) 96 conv2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_1 (Activation) (None, 111, 111, 32) 0 batch_normalization_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_2 (Conv2D) (None, 109, 109, 32) 9216 activation_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_2 (BatchNor (None, 109, 109, 32) 96 conv2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_2 (Activation) (None, 109, 109, 32) 0 
batch_normalization_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_3 (Conv2D) (None, 109, 109, 64) 18432 activation_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_3 (BatchNor (None, 109, 109, 64) 192 conv2d_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_3 (Activation) (None, 109, 109, 64) 0 batch_normalization_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_1 (MaxPooling2D) (None, 54, 54, 64) 0 activation_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_4 (Conv2D) (None, 54, 54, 80) 5120 max_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_4 (BatchNor (None, 54, 54, 80) 240 conv2d_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_4 (Activation) (None, 54, 54, 80) 0 batch_normalization_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_5 (Conv2D) (None, 52, 52, 192) 138240 activation_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_5 (BatchNor (None, 52, 52, 192) 576 conv2d_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_5 (Activation) (None, 52, 52, 192) 0 batch_normalization_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_2 (MaxPooling2D) (None, 25, 25, 192) 0 activation_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_9 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_9 (BatchNor (None, 25, 25, 64) 192 conv2d_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_9 (Activation) (None, 25, 25, 64) 0 batch_normalization_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_7 (Conv2D) (None, 25, 25, 48) 9216 max_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_10 (Conv2D) (None, 25, 25, 96) 55296 activation_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_7 (BatchNor (None, 25, 25, 48) 144 conv2d_7[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_10 (BatchNo (None, 25, 25, 96) 288 conv2d_10[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_7 (Activation) (None, 25, 25, 48) 0 batch_normalization_7[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "activation_10 (Activation) (None, 25, 25, 96) 0 batch_normalization_10[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_1 (AveragePoo (None, 25, 25, 192) 0 max_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_6 (Conv2D) (None, 25, 25, 64) 12288 max_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_8 (Conv2D) (None, 25, 25, 64) 76800 activation_7[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_11 (Conv2D) (None, 25, 25, 96) 82944 activation_10[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_12 (Conv2D) (None, 25, 25, 32) 6144 average_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_6 (BatchNor (None, 25, 25, 64) 192 conv2d_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_8 (BatchNor (None, 25, 25, 64) 192 conv2d_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_11 (BatchNo (None, 25, 25, 96) 288 conv2d_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_12 (BatchNo (None, 25, 25, 32) 96 conv2d_12[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_6 (Activation) (None, 25, 25, 64) 0 batch_normalization_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_8 (Activation) (None, 25, 25, 64) 0 batch_normalization_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_11 (Activation) (None, 25, 25, 96) 0 batch_normalization_11[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_12 (Activation) (None, 25, 25, 32) 0 batch_normalization_12[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed0 (Concatenate) (None, 25, 25, 256) 0 activation_6[0][0] \n", + " activation_8[0][0] \n", + " activation_11[0][0] \n", + " activation_12[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_16 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_16 (BatchNo (None, 25, 25, 64) 192 conv2d_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_16 (Activation) (None, 25, 25, 64) 0 batch_normalization_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_14 (Conv2D) (None, 25, 25, 48) 12288 
mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_17 (Conv2D) (None, 25, 25, 96) 55296 activation_16[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_14 (BatchNo (None, 25, 25, 48) 144 conv2d_14[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_17 (BatchNo (None, 25, 25, 96) 288 conv2d_17[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_14 (Activation) (None, 25, 25, 48) 0 batch_normalization_14[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_17 (Activation) (None, 25, 25, 96) 0 batch_normalization_17[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_2 (AveragePoo (None, 25, 25, 256) 0 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_13 (Conv2D) (None, 25, 25, 64) 16384 mixed0[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_15 (Conv2D) (None, 25, 25, 64) 76800 activation_14[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_18 (Conv2D) (None, 25, 25, 96) 82944 activation_17[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_19 (Conv2D) (None, 25, 25, 64) 16384 average_pooling2d_2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_13 (BatchNo (None, 25, 25, 64) 192 conv2d_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_15 (BatchNo (None, 25, 25, 64) 192 conv2d_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_18 (BatchNo (None, 25, 25, 96) 288 conv2d_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_19 (BatchNo (None, 25, 25, 64) 192 conv2d_19[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_13 (Activation) (None, 25, 25, 64) 0 batch_normalization_13[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_15 (Activation) (None, 25, 25, 64) 0 batch_normalization_15[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_18 (Activation) (None, 25, 25, 96) 0 batch_normalization_18[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_19 (Activation) (None, 25, 25, 64) 0 batch_normalization_19[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed1 (Concatenate) (None, 25, 25, 288) 0 activation_13[0][0] \n", + " activation_15[0][0] \n", + " 
activation_18[0][0] \n", + " activation_19[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_23 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_23 (BatchNo (None, 25, 25, 64) 192 conv2d_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_23 (Activation) (None, 25, 25, 64) 0 batch_normalization_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_21 (Conv2D) (None, 25, 25, 48) 13824 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_24 (Conv2D) (None, 25, 25, 96) 55296 activation_23[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_21 (BatchNo (None, 25, 25, 48) 144 conv2d_21[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_24 (BatchNo (None, 25, 25, 96) 288 conv2d_24[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_21 (Activation) (None, 25, 25, 48) 0 batch_normalization_21[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_24 (Activation) (None, 25, 25, 96) 0 batch_normalization_24[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_3 (AveragePoo (None, 25, 25, 288) 0 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_20 (Conv2D) (None, 25, 25, 64) 18432 mixed1[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_22 (Conv2D) (None, 25, 25, 64) 76800 activation_21[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_25 (Conv2D) (None, 25, 25, 96) 82944 activation_24[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_26 (Conv2D) (None, 25, 25, 64) 18432 average_pooling2d_3[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_20 (BatchNo (None, 25, 25, 64) 192 conv2d_20[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_22 (BatchNo (None, 25, 25, 64) 192 conv2d_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_25 (BatchNo (None, 25, 25, 96) 288 conv2d_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_26 (BatchNo (None, 25, 25, 64) 192 conv2d_26[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_20 (Activation) (None, 25, 25, 64) 0 batch_normalization_20[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "activation_22 (Activation) (None, 25, 25, 64) 0 batch_normalization_22[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_25 (Activation) (None, 25, 25, 96) 0 batch_normalization_25[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_26 (Activation) (None, 25, 25, 64) 0 batch_normalization_26[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed2 (Concatenate) (None, 25, 25, 288) 0 activation_20[0][0] \n", + " activation_22[0][0] \n", + " activation_25[0][0] \n", + " activation_26[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_28 (Conv2D) (None, 25, 25, 64) 18432 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_28 (BatchNo (None, 25, 25, 64) 192 conv2d_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_28 (Activation) (None, 25, 25, 64) 0 batch_normalization_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_29 (Conv2D) (None, 25, 25, 96) 55296 activation_28[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_29 (BatchNo (None, 25, 25, 96) 288 conv2d_29[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_29 (Activation) (None, 25, 25, 96) 0 batch_normalization_29[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_27 (Conv2D) (None, 12, 12, 384) 995328 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_30 (Conv2D) (None, 12, 12, 96) 82944 activation_29[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_27 (BatchNo (None, 12, 12, 384) 1152 conv2d_27[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_30 (BatchNo (None, 12, 12, 96) 288 conv2d_30[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_27 (Activation) (None, 12, 12, 384) 0 batch_normalization_27[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_30 (Activation) (None, 12, 12, 96) 0 batch_normalization_30[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_3 (MaxPooling2D) (None, 12, 12, 288) 0 mixed2[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed3 (Concatenate) (None, 12, 12, 768) 0 activation_27[0][0] \n", + " activation_30[0][0] \n", + " max_pooling2d_3[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "conv2d_35 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_35 (BatchNo (None, 12, 12, 128) 384 conv2d_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_35 (Activation) (None, 12, 12, 128) 0 batch_normalization_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_36 (Conv2D) (None, 12, 12, 128) 114688 activation_35[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_36 (BatchNo (None, 12, 12, 128) 384 conv2d_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_36 (Activation) (None, 12, 12, 128) 0 batch_normalization_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_32 (Conv2D) (None, 12, 12, 128) 98304 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_37 (Conv2D) (None, 12, 12, 128) 114688 activation_36[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_32 (BatchNo (None, 12, 12, 128) 384 conv2d_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_37 (BatchNo (None, 12, 12, 128) 384 conv2d_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_32 (Activation) (None, 12, 12, 128) 0 batch_normalization_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_37 (Activation) (None, 12, 12, 128) 0 batch_normalization_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_33 (Conv2D) (None, 12, 12, 128) 114688 activation_32[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_38 (Conv2D) (None, 12, 12, 128) 114688 activation_37[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_33 (BatchNo (None, 12, 12, 128) 384 conv2d_33[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_38 (BatchNo (None, 12, 12, 128) 384 conv2d_38[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_33 (Activation) (None, 12, 12, 128) 0 batch_normalization_33[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_38 (Activation) (None, 12, 12, 128) 0 batch_normalization_38[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_4 (AveragePoo (None, 12, 12, 768) 0 mixed3[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "conv2d_31 (Conv2D) (None, 12, 12, 192) 147456 mixed3[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_34 (Conv2D) (None, 12, 12, 192) 172032 activation_33[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_39 (Conv2D) (None, 12, 12, 192) 172032 activation_38[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_40 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_31 (BatchNo (None, 12, 12, 192) 576 conv2d_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_34 (BatchNo (None, 12, 12, 192) 576 conv2d_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_39 (BatchNo (None, 12, 12, 192) 576 conv2d_39[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_40 (BatchNo (None, 12, 12, 192) 576 conv2d_40[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_31 (Activation) (None, 12, 12, 192) 0 batch_normalization_31[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_34 (Activation) (None, 12, 12, 192) 0 batch_normalization_34[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_39 (Activation) (None, 12, 12, 192) 0 batch_normalization_39[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_40 (Activation) (None, 12, 12, 192) 0 batch_normalization_40[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed4 (Concatenate) (None, 12, 12, 768) 0 activation_31[0][0] \n", + " activation_34[0][0] \n", + " activation_39[0][0] \n", + " activation_40[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_45 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_45 (BatchNo (None, 12, 12, 160) 480 conv2d_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_45 (Activation) (None, 12, 12, 160) 0 batch_normalization_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_46 (Conv2D) (None, 12, 12, 160) 179200 activation_45[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_46 (BatchNo (None, 12, 12, 160) 480 conv2d_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_46 (Activation) (None, 
12, 12, 160) 0 batch_normalization_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_42 (Conv2D) (None, 12, 12, 160) 122880 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_47 (Conv2D) (None, 12, 12, 160) 179200 activation_46[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_42 (BatchNo (None, 12, 12, 160) 480 conv2d_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_47 (BatchNo (None, 12, 12, 160) 480 conv2d_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_42 (Activation) (None, 12, 12, 160) 0 batch_normalization_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_47 (Activation) (None, 12, 12, 160) 0 batch_normalization_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_43 (Conv2D) (None, 12, 12, 160) 179200 activation_42[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_48 (Conv2D) (None, 12, 12, 160) 179200 activation_47[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_43 (BatchNo (None, 12, 12, 160) 480 conv2d_43[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_48 (BatchNo (None, 12, 12, 160) 480 conv2d_48[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_43 (Activation) (None, 12, 12, 160) 0 batch_normalization_43[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_48 (Activation) (None, 12, 12, 160) 0 batch_normalization_48[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_5 (AveragePoo (None, 12, 12, 768) 0 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_41 (Conv2D) (None, 12, 12, 192) 147456 mixed4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_44 (Conv2D) (None, 12, 12, 192) 215040 activation_43[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_49 (Conv2D) (None, 12, 12, 192) 215040 activation_48[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_50 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_41 (BatchNo (None, 12, 12, 192) 576 conv2d_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_44 (BatchNo (None, 12, 12, 192) 576 conv2d_44[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "batch_normalization_49 (BatchNo (None, 12, 12, 192) 576 conv2d_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_50 (BatchNo (None, 12, 12, 192) 576 conv2d_50[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_41 (Activation) (None, 12, 12, 192) 0 batch_normalization_41[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_44 (Activation) (None, 12, 12, 192) 0 batch_normalization_44[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_49 (Activation) (None, 12, 12, 192) 0 batch_normalization_49[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_50 (Activation) (None, 12, 12, 192) 0 batch_normalization_50[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed5 (Concatenate) (None, 12, 12, 768) 0 activation_41[0][0] \n", + " activation_44[0][0] \n", + " activation_49[0][0] \n", + " activation_50[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_55 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_55 (BatchNo (None, 12, 12, 160) 480 conv2d_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_55 (Activation) (None, 12, 12, 160) 0 batch_normalization_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_56 (Conv2D) (None, 12, 12, 160) 179200 activation_55[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_56 (BatchNo (None, 12, 12, 160) 480 conv2d_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_56 (Activation) (None, 12, 12, 160) 0 batch_normalization_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_52 (Conv2D) (None, 12, 12, 160) 122880 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_57 (Conv2D) (None, 12, 12, 160) 179200 activation_56[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_52 (BatchNo (None, 12, 12, 160) 480 conv2d_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_57 (BatchNo (None, 12, 12, 160) 480 conv2d_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_52 (Activation) (None, 12, 12, 160) 0 batch_normalization_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_57 
(Activation) (None, 12, 12, 160) 0 batch_normalization_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_53 (Conv2D) (None, 12, 12, 160) 179200 activation_52[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_58 (Conv2D) (None, 12, 12, 160) 179200 activation_57[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_53 (BatchNo (None, 12, 12, 160) 480 conv2d_53[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_58 (BatchNo (None, 12, 12, 160) 480 conv2d_58[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_53 (Activation) (None, 12, 12, 160) 0 batch_normalization_53[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_58 (Activation) (None, 12, 12, 160) 0 batch_normalization_58[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_6 (AveragePoo (None, 12, 12, 768) 0 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_51 (Conv2D) (None, 12, 12, 192) 147456 mixed5[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_54 (Conv2D) (None, 12, 12, 192) 215040 activation_53[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_59 (Conv2D) (None, 12, 12, 192) 215040 activation_58[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_60 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_51 (BatchNo (None, 12, 12, 192) 576 conv2d_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_54 (BatchNo (None, 12, 12, 192) 576 conv2d_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_59 (BatchNo (None, 12, 12, 192) 576 conv2d_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_60 (BatchNo (None, 12, 12, 192) 576 conv2d_60[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_51 (Activation) (None, 12, 12, 192) 0 batch_normalization_51[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_54 (Activation) (None, 12, 12, 192) 0 batch_normalization_54[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_59 (Activation) (None, 12, 12, 192) 0 batch_normalization_59[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_60 (Activation) (None, 12, 
12, 192) 0 batch_normalization_60[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed6 (Concatenate) (None, 12, 12, 768) 0 activation_51[0][0] \n", + " activation_54[0][0] \n", + " activation_59[0][0] \n", + " activation_60[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_65 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_65 (BatchNo (None, 12, 12, 192) 576 conv2d_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_65 (Activation) (None, 12, 12, 192) 0 batch_normalization_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_66 (Conv2D) (None, 12, 12, 192) 258048 activation_65[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_66 (BatchNo (None, 12, 12, 192) 576 conv2d_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_66 (Activation) (None, 12, 12, 192) 0 batch_normalization_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_62 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_67 (Conv2D) (None, 12, 12, 192) 258048 activation_66[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_62 (BatchNo (None, 12, 12, 192) 576 conv2d_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_67 (BatchNo (None, 12, 12, 192) 576 conv2d_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_62 (Activation) (None, 12, 12, 192) 0 batch_normalization_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_67 (Activation) (None, 12, 12, 192) 0 batch_normalization_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_63 (Conv2D) (None, 12, 12, 192) 258048 activation_62[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_68 (Conv2D) (None, 12, 12, 192) 258048 activation_67[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_63 (BatchNo (None, 12, 12, 192) 576 conv2d_63[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_68 (BatchNo (None, 12, 12, 192) 576 conv2d_68[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_63 (Activation) (None, 12, 12, 192) 0 batch_normalization_63[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "activation_68 (Activation) (None, 12, 12, 192) 0 batch_normalization_68[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_7 (AveragePoo (None, 12, 12, 768) 0 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_61 (Conv2D) (None, 12, 12, 192) 147456 mixed6[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_64 (Conv2D) (None, 12, 12, 192) 258048 activation_63[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_69 (Conv2D) (None, 12, 12, 192) 258048 activation_68[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_70 (Conv2D) (None, 12, 12, 192) 147456 average_pooling2d_7[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_61 (BatchNo (None, 12, 12, 192) 576 conv2d_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_64 (BatchNo (None, 12, 12, 192) 576 conv2d_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_69 (BatchNo (None, 12, 12, 192) 576 conv2d_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_70 (BatchNo (None, 12, 12, 192) 576 conv2d_70[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_61 (Activation) (None, 12, 12, 192) 0 batch_normalization_61[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_64 (Activation) (None, 12, 12, 192) 0 batch_normalization_64[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_69 (Activation) (None, 12, 12, 192) 0 batch_normalization_69[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_70 (Activation) (None, 12, 12, 192) 0 batch_normalization_70[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed7 (Concatenate) (None, 12, 12, 768) 0 activation_61[0][0] \n", + " activation_64[0][0] \n", + " activation_69[0][0] \n", + " activation_70[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_73 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_73 (BatchNo (None, 12, 12, 192) 576 conv2d_73[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_73 (Activation) (None, 12, 12, 192) 0 batch_normalization_73[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_74 (Conv2D) (None, 
12, 12, 192) 258048 activation_73[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_74 (BatchNo (None, 12, 12, 192) 576 conv2d_74[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_74 (Activation) (None, 12, 12, 192) 0 batch_normalization_74[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_71 (Conv2D) (None, 12, 12, 192) 147456 mixed7[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_75 (Conv2D) (None, 12, 12, 192) 258048 activation_74[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_71 (BatchNo (None, 12, 12, 192) 576 conv2d_71[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_75 (BatchNo (None, 12, 12, 192) 576 conv2d_75[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_71 (Activation) (None, 12, 12, 192) 0 batch_normalization_71[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_75 (Activation) (None, 12, 12, 192) 0 batch_normalization_75[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_72 (Conv2D) (None, 5, 5, 320) 552960 activation_71[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_76 (Conv2D) (None, 5, 5, 192) 331776 activation_75[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_72 (BatchNo (None, 5, 5, 320) 960 conv2d_72[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_76 (BatchNo (None, 5, 5, 192) 576 conv2d_76[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_72 (Activation) (None, 5, 5, 320) 0 batch_normalization_72[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_76 (Activation) (None, 5, 5, 192) 0 batch_normalization_76[0][0] \n", + "__________________________________________________________________________________________________\n", + "max_pooling2d_4 (MaxPooling2D) (None, 5, 5, 768) 0 mixed7[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed8 (Concatenate) (None, 5, 5, 1280) 0 activation_72[0][0] \n", + " activation_76[0][0] \n", + " max_pooling2d_4[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_81 (Conv2D) (None, 5, 5, 448) 573440 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_81 (BatchNo (None, 5, 5, 448) 1344 conv2d_81[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_81 (Activation) (None, 5, 5, 
448) 0 batch_normalization_81[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_78 (Conv2D) (None, 5, 5, 384) 491520 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_82 (Conv2D) (None, 5, 5, 384) 1548288 activation_81[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_78 (BatchNo (None, 5, 5, 384) 1152 conv2d_78[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_82 (BatchNo (None, 5, 5, 384) 1152 conv2d_82[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_78 (Activation) (None, 5, 5, 384) 0 batch_normalization_78[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_82 (Activation) (None, 5, 5, 384) 0 batch_normalization_82[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_79 (Conv2D) (None, 5, 5, 384) 442368 activation_78[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_80 (Conv2D) (None, 5, 5, 384) 442368 activation_78[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_83 (Conv2D) (None, 5, 5, 384) 442368 activation_82[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_84 (Conv2D) (None, 5, 5, 384) 442368 activation_82[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_8 (AveragePoo (None, 5, 5, 1280) 0 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_77 (Conv2D) (None, 5, 5, 320) 409600 mixed8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_79 (BatchNo (None, 5, 5, 384) 1152 conv2d_79[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_80 (BatchNo (None, 5, 5, 384) 1152 conv2d_80[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_83 (BatchNo (None, 5, 5, 384) 1152 conv2d_83[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_84 (BatchNo (None, 5, 5, 384) 1152 conv2d_84[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_85 (Conv2D) (None, 5, 5, 192) 245760 average_pooling2d_8[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_77 (BatchNo (None, 5, 5, 320) 960 conv2d_77[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_79 (Activation) (None, 5, 5, 384) 0 batch_normalization_79[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "activation_80 (Activation) (None, 5, 5, 384) 0 batch_normalization_80[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_83 (Activation) (None, 5, 5, 384) 0 batch_normalization_83[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_84 (Activation) (None, 5, 5, 384) 0 batch_normalization_84[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_85 (BatchNo (None, 5, 5, 192) 576 conv2d_85[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_77 (Activation) (None, 5, 5, 320) 0 batch_normalization_77[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed9_0 (Concatenate) (None, 5, 5, 768) 0 activation_79[0][0] \n", + " activation_80[0][0] \n", + "__________________________________________________________________________________________________\n", + "concatenate_1 (Concatenate) (None, 5, 5, 768) 0 activation_83[0][0] \n", + " activation_84[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_85 (Activation) (None, 5, 5, 192) 0 batch_normalization_85[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed9 (Concatenate) (None, 5, 5, 2048) 0 activation_77[0][0] \n", + " mixed9_0[0][0] \n", + " concatenate_1[0][0] \n", + " activation_85[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_90 (Conv2D) (None, 5, 5, 448) 917504 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_90 (BatchNo (None, 5, 5, 448) 1344 conv2d_90[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_90 (Activation) (None, 5, 5, 448) 0 batch_normalization_90[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_87 (Conv2D) (None, 5, 5, 384) 786432 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_91 (Conv2D) (None, 5, 5, 384) 1548288 activation_90[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_87 (BatchNo (None, 5, 5, 384) 1152 conv2d_87[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_91 (BatchNo (None, 5, 5, 384) 1152 conv2d_91[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_87 (Activation) (None, 5, 5, 384) 0 batch_normalization_87[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_91 (Activation) (None, 5, 5, 384) 0 batch_normalization_91[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "conv2d_88 (Conv2D) (None, 5, 5, 384) 442368 activation_87[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_89 (Conv2D) (None, 5, 5, 384) 442368 activation_87[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_92 (Conv2D) (None, 5, 5, 384) 442368 activation_91[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_93 (Conv2D) (None, 5, 5, 384) 442368 activation_91[0][0] \n", + "__________________________________________________________________________________________________\n", + "average_pooling2d_9 (AveragePoo (None, 5, 5, 2048) 0 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_86 (Conv2D) (None, 5, 5, 320) 655360 mixed9[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_88 (BatchNo (None, 5, 5, 384) 1152 conv2d_88[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_89 (BatchNo (None, 5, 5, 384) 1152 conv2d_89[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_92 (BatchNo (None, 5, 5, 384) 1152 conv2d_92[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_93 (BatchNo (None, 5, 5, 384) 1152 conv2d_93[0][0] \n", + "__________________________________________________________________________________________________\n", + "conv2d_94 (Conv2D) (None, 5, 5, 192) 393216 average_pooling2d_9[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_86 (BatchNo (None, 5, 5, 320) 960 conv2d_86[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_88 (Activation) (None, 5, 5, 384) 0 batch_normalization_88[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_89 (Activation) (None, 5, 5, 384) 0 batch_normalization_89[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_92 (Activation) (None, 5, 5, 384) 0 batch_normalization_92[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_93 (Activation) (None, 5, 5, 384) 0 batch_normalization_93[0][0] \n", + "__________________________________________________________________________________________________\n", + "batch_normalization_94 (BatchNo (None, 5, 5, 192) 576 conv2d_94[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_86 (Activation) (None, 5, 5, 320) 0 batch_normalization_86[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed9_1 (Concatenate) (None, 5, 5, 768) 0 activation_88[0][0] \n", + " activation_89[0][0] \n", + 
"__________________________________________________________________________________________________\n", + "concatenate_2 (Concatenate) (None, 5, 5, 768) 0 activation_92[0][0] \n", + " activation_93[0][0] \n", + "__________________________________________________________________________________________________\n", + "activation_94 (Activation) (None, 5, 5, 192) 0 batch_normalization_94[0][0] \n", + "__________________________________________________________________________________________________\n", + "mixed10 (Concatenate) (None, 5, 5, 2048) 0 activation_86[0][0] \n", + " mixed9_1[0][0] \n", + " concatenate_2[0][0] \n", + " activation_94[0][0] \n", + "__________________________________________________________________________________________________\n", + "global_average_pooling2d_1 (Glo (None, 2048) 0 mixed10[0][0] \n", + "__________________________________________________________________________________________________\n", + "dense_1 (Dense) (None, 512) 1049088 global_average_pooling2d_1[0][0] \n", + "__________________________________________________________________________________________________\n", + "dense_2 (Dense) (None, 6) 3078 dense_1[0][0] \n", + "==================================================================================================\n", + "Total params: 22,854,950\n", + "Trainable params: 22,820,518\n", + "Non-trainable params: 34,432\n", + "__________________________________________________________________________________________________\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "f36tPFiG8KAQ", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 35 + }, + "outputId": "8ebb6085-9a85-4a13-8d37-2bcd4ec1162a" + }, + "source": [ + "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", + "\n", + "# Rescaling images\n", + "train_datagen = ImageDataGenerator(rescale=1/255)\n", + "\n", + "train_generator = train_datagen.flow_from_directory(\n", + " directory = '/content/NEU-DET/train/images/',\n", + " target_size = (224, 224),\n", + " batch_size = 60,\n", + " class_mode = 'categorical'\n", + ")" + ], + "execution_count": 16, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Found 1440 images belonging to 6 classes.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "LKK0dbtr8YQu", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 35 + }, + "outputId": "dd45061e-c6a7-41cc-9eae-f75bfa84933d" + }, + "source": [ + "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", + "\n", + "# Rescaling images\n", + "train_datagen = ImageDataGenerator(rescale=1/255)\n", + "\n", + "train_generator_y = train_datagen.flow_from_directory(\n", + " directory = '/content/NEU-DET/train/images/',\n", + " target_size = (224, 224),\n", + " batch_size = 60,\n", + " class_mode = 'categorical'\n", + ")" + ], + "execution_count": 17, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Found 1440 images belonging to 6 classes.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "SHfaql80ZOV_", + "colab_type": "code", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 537 + }, + "outputId": "8c164beb-bd86-487a-b1fa-817f2f14b2be" + }, + "source": [ + "history = model.fit(\n", + " train_generator,\n", + " steps_per_epoch=8,\n", + " epochs=15,\n", + " verbose=1,\n", + ")" + ], + "execution_count": 18, + "outputs": [ + { + "output_type": 
"stream", + "text": [ + "Epoch 1/15\n", + "8/8 [==============================] - 35s 4s/step - loss: 0.8450 - accuracy: 0.7333\n", + "Epoch 2/15\n", + "8/8 [==============================] - 6s 691ms/step - loss: 0.1222 - accuracy: 0.9729\n", + "Epoch 3/15\n", + "8/8 [==============================] - 5s 687ms/step - loss: 0.2477 - accuracy: 0.9417\n", + "Epoch 4/15\n", + "8/8 [==============================] - 6s 699ms/step - loss: 0.0692 - accuracy: 0.9750\n", + "Epoch 5/15\n", + "8/8 [==============================] - 6s 697ms/step - loss: 0.0409 - accuracy: 0.9833\n", + "Epoch 6/15\n", + "8/8 [==============================] - 6s 697ms/step - loss: 0.0469 - accuracy: 0.9854\n", + "Epoch 7/15\n", + "8/8 [==============================] - 6s 709ms/step - loss: 0.1011 - accuracy: 0.9771\n", + "Epoch 8/15\n", + "8/8 [==============================] - 6s 709ms/step - loss: 0.0942 - accuracy: 0.9771\n", + "Epoch 9/15\n", + "8/8 [==============================] - 6s 707ms/step - loss: 0.0821 - accuracy: 0.9750\n", + "Epoch 10/15\n", + "8/8 [==============================] - 6s 719ms/step - loss: 0.1111 - accuracy: 0.9771\n", + "Epoch 11/15\n", + "8/8 [==============================] - 6s 720ms/step - loss: 0.1389 - accuracy: 0.9542\n", + "Epoch 12/15\n", + "8/8 [==============================] - 6s 721ms/step - loss: 0.0588 - accuracy: 0.9792\n", + "Epoch 13/15\n", + "8/8 [==============================] - 6s 733ms/step - loss: 0.0783 - accuracy: 0.9812\n", + "Epoch 14/15\n", + "8/8 [==============================] - 6s 736ms/step - loss: 0.1218 - accuracy: 0.9792\n", + "Epoch 15/15\n", + "8/8 [==============================] - 6s 736ms/step - loss: 0.1343 - accuracy: 0.9646\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ZWIS5fdr6qWT", + "colab_type": "code", + "colab": {} + }, + "source": [ + "" + ], + "execution_count": null, + "outputs": [] } ] } \ No newline at end of file