Jovian
⭐️
Sign In
In [2]:
!pip install jovian  -q
Building wheel for jovian (setup.py) ... done Building wheel for uuid (setup.py) ... done
In [1]:
import jovian
In [ ]:
from datetime import datetime
import keras
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, Flatten, Conv2D, MaxPooling2D
from keras.layers.normalization import BatchNormalization
import numpy as np


def alexnet(input_data_shape=(224, 224, 3, ), number_of_classes=10):
    model = Sequential()

    # 1st Convolutional Layer
    model.add(Conv2D(filters=96, input_shape=input_data_shape, kernel_size=(11, 11), strides=(4, 4), padding='valid'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # Max Pooling
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'))
    # 2nd Convolutional Layer
    model.add(Conv2D(filters=256, kernel_size=(11, 11), strides=(1, 1), padding='valid'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # Max Pooling
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'))
    # 3rd Convolutional Layer
    model.add(Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='valid'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # 4th Convolutional Layer
    model.add(Conv2D(filters=384, kernel_size=(3, 3), strides=(1, 1), padding='valid'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # 5th Convolutional Layer
    model.add(Conv2D(filters=256, kernel_size=(3, 3), strides=(1, 1), padding='valid'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # Max Pooling
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='valid'))
    # Flatten the feature maps to pass them to Fully Connected Layers
    model.add(Flatten())

    # Fully Connected Layers
    model.add(Dense(4096))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dense(4096))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dense(number_of_classes, activation='softmax'))

    model.summary()
    return model
In [ ]:
def vgg_16(input_data_shape=(224, 224, 3,), number_of_classes=10):
    model = Sequential()
    # Block 1
    model.add(Conv2D(filters=64, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=64, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Block 2
    model.add(Conv2D(filters=128, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=128, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Block 3
    model.add(Conv2D(filters=256, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=256, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=256, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Block 4
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Block 5
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Flatten the feature maps to pass them to Fully Connected Layers
    model.add(Flatten())

    # fully connected layers
    model.add(Dense(4096))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(4096))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dense(number_of_classes))
    model.add(BatchNormalization())
    model.add(Activation('softmax'))
    model.add(Dropout(0.5))

    # Create model.
    model.summary()

    return model
In [ ]:
def Cnn(input_data_shape=(224, 224, 3,), number_of_classes=10):
    model = Sequential()
    # Block 1
    model.add(Conv2D(filters=64, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=64, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Block 2
    model.add(Conv2D(filters=128, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=128, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Block 3
    model.add(Conv2D(filters=256, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=256, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=256, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))
    # Block 4
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Block 5
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=512, input_shape=input_data_shape, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
    model.add(Dropout(0.5))

    # Flatten the feature maps to pass them to Fully Connected Layers
    model.add(Flatten())

    # fully connected layers
    model.add(Dense(4096))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dense(4096))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dense(number_of_classes))
    model.add(BatchNormalization())
    model.add(Activation('softmax'))
    model.add(Dropout(0.5))


    # Create model.
    model.summary()

    return model
In [ ]:
batch_size = 128
num_classes = 10
num_of_training_iteration = 100
In [ ]:
input_data_shape = (224, 224, 3)
In [57]:
alexnet_model = alexnet(input_data_shape=input_data_shape)
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_108 (Conv2D) (None, 54, 54, 96) 34944 _________________________________________________________________ batch_normalization_75 (Batc (None, 54, 54, 96) 384 _________________________________________________________________ activation_1 (Activation) (None, 54, 54, 96) 0 _________________________________________________________________ max_pooling2d_46 (MaxPooling (None, 27, 27, 96) 0 _________________________________________________________________ conv2d_109 (Conv2D) (None, 17, 17, 256) 2973952 _________________________________________________________________ batch_normalization_76 (Batc (None, 17, 17, 256) 1024 _________________________________________________________________ activation_2 (Activation) (None, 17, 17, 256) 0 _________________________________________________________________ max_pooling2d_47 (MaxPooling (None, 8, 8, 256) 0 _________________________________________________________________ conv2d_110 (Conv2D) (None, 6, 6, 384) 885120 _________________________________________________________________ batch_normalization_77 (Batc (None, 6, 6, 384) 1536 _________________________________________________________________ activation_3 (Activation) (None, 6, 6, 384) 0 _________________________________________________________________ conv2d_111 (Conv2D) (None, 4, 4, 384) 1327488 _________________________________________________________________ batch_normalization_78 (Batc (None, 4, 4, 384) 1536 _________________________________________________________________ activation_4 (Activation) (None, 4, 4, 384) 0 _________________________________________________________________ conv2d_112 (Conv2D) (None, 2, 2, 256) 884992 _________________________________________________________________ batch_normalization_79 (Batc (None, 2, 2, 256) 1024 _________________________________________________________________ activation_5 (Activation) (None, 2, 2, 256) 0 _________________________________________________________________ max_pooling2d_48 (MaxPooling (None, 1, 1, 256) 0 _________________________________________________________________ flatten_11 (Flatten) (None, 256) 0 _________________________________________________________________ dense_31 (Dense) (None, 4096) 1052672 _________________________________________________________________ batch_normalization_80 (Batc (None, 4096) 16384 _________________________________________________________________ activation_6 (Activation) (None, 4096) 0 _________________________________________________________________ dense_32 (Dense) (None, 4096) 16781312 _________________________________________________________________ batch_normalization_81 (Batc (None, 4096) 16384 _________________________________________________________________ activation_7 (Activation) (None, 4096) 0 _________________________________________________________________ dense_33 (Dense) (None, 10) 40970 ================================================================= Total params: 24,019,722 Trainable params: 24,000,586 Non-trainable params: 19,136 _________________________________________________________________
In [61]:
vgg16_model = vgg_16(input_data_shape=input_data_shape)
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_113 (Conv2D) (None, 224, 224, 64) 1792 _________________________________________________________________ batch_normalization_82 (Batc (None, 224, 224, 64) 256 _________________________________________________________________ activation_8 (Activation) (None, 224, 224, 64) 0 _________________________________________________________________ conv2d_114 (Conv2D) (None, 224, 224, 64) 36928 _________________________________________________________________ batch_normalization_83 (Batc (None, 224, 224, 64) 256 _________________________________________________________________ activation_9 (Activation) (None, 224, 224, 64) 0 _________________________________________________________________ max_pooling2d_49 (MaxPooling (None, 112, 112, 64) 0 _________________________________________________________________ dropout_11 (Dropout) (None, 112, 112, 64) 0 _________________________________________________________________ conv2d_115 (Conv2D) (None, 112, 112, 128) 73856 _________________________________________________________________ batch_normalization_84 (Batc (None, 112, 112, 128) 512 _________________________________________________________________ activation_10 (Activation) (None, 112, 112, 128) 0 _________________________________________________________________ conv2d_116 (Conv2D) (None, 112, 112, 128) 147584 _________________________________________________________________ batch_normalization_85 (Batc (None, 112, 112, 128) 512 _________________________________________________________________ activation_11 (Activation) (None, 112, 112, 128) 0 _________________________________________________________________ max_pooling2d_50 (MaxPooling (None, 56, 56, 128) 0 _________________________________________________________________ dropout_12 (Dropout) (None, 56, 56, 128) 0 _________________________________________________________________ conv2d_117 (Conv2D) (None, 56, 56, 256) 295168 _________________________________________________________________ batch_normalization_86 (Batc (None, 56, 56, 256) 1024 _________________________________________________________________ activation_12 (Activation) (None, 56, 56, 256) 0 _________________________________________________________________ conv2d_118 (Conv2D) (None, 56, 56, 256) 590080 _________________________________________________________________ batch_normalization_87 (Batc (None, 56, 56, 256) 1024 _________________________________________________________________ activation_13 (Activation) (None, 56, 56, 256) 0 _________________________________________________________________ conv2d_119 (Conv2D) (None, 56, 56, 256) 590080 _________________________________________________________________ batch_normalization_88 (Batc (None, 56, 56, 256) 1024 _________________________________________________________________ activation_14 (Activation) (None, 56, 56, 256) 0 _________________________________________________________________ max_pooling2d_51 (MaxPooling (None, 28, 28, 256) 0 _________________________________________________________________ dropout_13 (Dropout) (None, 28, 28, 256) 0 _________________________________________________________________ conv2d_120 (Conv2D) (None, 28, 28, 512) 1180160 _________________________________________________________________ batch_normalization_89 (Batc (None, 28, 28, 512) 2048 _________________________________________________________________ activation_15 (Activation) (None, 28, 28, 512) 0 _________________________________________________________________ conv2d_121 (Conv2D) (None, 28, 28, 512) 2359808 _________________________________________________________________ batch_normalization_90 (Batc (None, 28, 28, 512) 2048 _________________________________________________________________ activation_16 (Activation) (None, 28, 28, 512) 0 _________________________________________________________________ conv2d_122 (Conv2D) (None, 28, 28, 512) 2359808 _________________________________________________________________ batch_normalization_91 (Batc (None, 28, 28, 512) 2048 _________________________________________________________________ activation_17 (Activation) (None, 28, 28, 512) 0 _________________________________________________________________ max_pooling2d_52 (MaxPooling (None, 14, 14, 512) 0 _________________________________________________________________ dropout_14 (Dropout) (None, 14, 14, 512) 0 _________________________________________________________________ conv2d_123 (Conv2D) (None, 14, 14, 512) 2359808 _________________________________________________________________ batch_normalization_92 (Batc (None, 14, 14, 512) 2048 _________________________________________________________________ activation_18 (Activation) (None, 14, 14, 512) 0 _________________________________________________________________ conv2d_124 (Conv2D) (None, 14, 14, 512) 2359808 _________________________________________________________________ batch_normalization_93 (Batc (None, 14, 14, 512) 2048 _________________________________________________________________ activation_19 (Activation) (None, 14, 14, 512) 0 _________________________________________________________________ conv2d_125 (Conv2D) (None, 14, 14, 512) 2359808 _________________________________________________________________ batch_normalization_94 (Batc (None, 14, 14, 512) 2048 _________________________________________________________________ activation_20 (Activation) (None, 14, 14, 512) 0 _________________________________________________________________ max_pooling2d_53 (MaxPooling (None, 7, 7, 512) 0 _________________________________________________________________ dropout_15 (Dropout) (None, 7, 7, 512) 0 _________________________________________________________________ flatten_12 (Flatten) (None, 25088) 0 _________________________________________________________________ dense_34 (Dense) (None, 4096) 102764544 _________________________________________________________________ batch_normalization_95 (Batc (None, 4096) 16384 _________________________________________________________________ activation_21 (Activation) (None, 4096) 0 _________________________________________________________________ dropout_16 (Dropout) (None, 4096) 0 _________________________________________________________________ dense_35 (Dense) (None, 4096) 16781312 _________________________________________________________________ batch_normalization_96 (Batc (None, 4096) 16384 _________________________________________________________________ activation_22 (Activation) (None, 4096) 0 _________________________________________________________________ dense_36 (Dense) (None, 10) 40970 _________________________________________________________________ batch_normalization_97 (Batc (None, 10) 40 _________________________________________________________________ activation_23 (Activation) (None, 10) 0 _________________________________________________________________ dropout_17 (Dropout) (None, 10) 0 ================================================================= Total params: 134,351,218 Trainable params: 134,326,366 Non-trainable params: 24,852 _________________________________________________________________
In [62]:
Cnn_model = Cnn(input_data_shape=input_data_shape)
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_126 (Conv2D) (None, 224, 224, 64) 4864 _________________________________________________________________ batch_normalization_98 (Batc (None, 224, 224, 64) 256 _________________________________________________________________ activation_24 (Activation) (None, 224, 224, 64) 0 _________________________________________________________________ conv2d_127 (Conv2D) (None, 224, 224, 64) 102464 _________________________________________________________________ batch_normalization_99 (Batc (None, 224, 224, 64) 256 _________________________________________________________________ activation_25 (Activation) (None, 224, 224, 64) 0 _________________________________________________________________ max_pooling2d_54 (MaxPooling (None, 112, 112, 64) 0 _________________________________________________________________ dropout_18 (Dropout) (None, 112, 112, 64) 0 _________________________________________________________________ conv2d_128 (Conv2D) (None, 112, 112, 128) 204928 _________________________________________________________________ batch_normalization_100 (Bat (None, 112, 112, 128) 512 _________________________________________________________________ activation_26 (Activation) (None, 112, 112, 128) 0 _________________________________________________________________ conv2d_129 (Conv2D) (None, 112, 112, 128) 409728 _________________________________________________________________ batch_normalization_101 (Bat (None, 112, 112, 128) 512 _________________________________________________________________ activation_27 (Activation) (None, 112, 112, 128) 0 _________________________________________________________________ max_pooling2d_55 (MaxPooling (None, 56, 56, 128) 0 _________________________________________________________________ dropout_19 (Dropout) (None, 56, 56, 128) 0 _________________________________________________________________ conv2d_130 (Conv2D) (None, 56, 56, 256) 819456 _________________________________________________________________ batch_normalization_102 (Bat (None, 56, 56, 256) 1024 _________________________________________________________________ activation_28 (Activation) (None, 56, 56, 256) 0 _________________________________________________________________ conv2d_131 (Conv2D) (None, 56, 56, 256) 1638656 _________________________________________________________________ batch_normalization_103 (Bat (None, 56, 56, 256) 1024 _________________________________________________________________ activation_29 (Activation) (None, 56, 56, 256) 0 _________________________________________________________________ conv2d_132 (Conv2D) (None, 56, 56, 256) 1638656 _________________________________________________________________ batch_normalization_104 (Bat (None, 56, 56, 256) 1024 _________________________________________________________________ activation_30 (Activation) (None, 56, 56, 256) 0 _________________________________________________________________ max_pooling2d_56 (MaxPooling (None, 28, 28, 256) 0 _________________________________________________________________ dropout_20 (Dropout) (None, 28, 28, 256) 0 _________________________________________________________________ conv2d_133 (Conv2D) (None, 28, 28, 512) 3277312 _________________________________________________________________ batch_normalization_105 (Bat (None, 28, 28, 512) 2048 _________________________________________________________________ activation_31 (Activation) (None, 28, 28, 512) 0 _________________________________________________________________ conv2d_134 (Conv2D) (None, 28, 28, 512) 6554112 _________________________________________________________________ batch_normalization_106 (Bat (None, 28, 28, 512) 2048 _________________________________________________________________ activation_32 (Activation) (None, 28, 28, 512) 0 _________________________________________________________________ conv2d_135 (Conv2D) (None, 28, 28, 512) 6554112 _________________________________________________________________ batch_normalization_107 (Bat (None, 28, 28, 512) 2048 _________________________________________________________________ activation_33 (Activation) (None, 28, 28, 512) 0 _________________________________________________________________ max_pooling2d_57 (MaxPooling (None, 14, 14, 512) 0 _________________________________________________________________ dropout_21 (Dropout) (None, 14, 14, 512) 0 _________________________________________________________________ conv2d_136 (Conv2D) (None, 14, 14, 512) 6554112 _________________________________________________________________ batch_normalization_108 (Bat (None, 14, 14, 512) 2048 _________________________________________________________________ activation_34 (Activation) (None, 14, 14, 512) 0 _________________________________________________________________ conv2d_137 (Conv2D) (None, 14, 14, 512) 6554112 _________________________________________________________________ batch_normalization_109 (Bat (None, 14, 14, 512) 2048 _________________________________________________________________ activation_35 (Activation) (None, 14, 14, 512) 0 _________________________________________________________________ conv2d_138 (Conv2D) (None, 14, 14, 512) 6554112 _________________________________________________________________ batch_normalization_110 (Bat (None, 14, 14, 512) 2048 _________________________________________________________________ activation_36 (Activation) (None, 14, 14, 512) 0 _________________________________________________________________ max_pooling2d_58 (MaxPooling (None, 7, 7, 512) 0 _________________________________________________________________ dropout_22 (Dropout) (None, 7, 7, 512) 0 _________________________________________________________________ flatten_13 (Flatten) (None, 25088) 0 _________________________________________________________________ dense_37 (Dense) (None, 4096) 102764544 _________________________________________________________________ batch_normalization_111 (Bat (None, 4096) 16384 _________________________________________________________________ activation_37 (Activation) (None, 4096) 0 _________________________________________________________________ dense_38 (Dense) (None, 4096) 16781312 _________________________________________________________________ batch_normalization_112 (Bat (None, 4096) 16384 _________________________________________________________________ activation_38 (Activation) (None, 4096) 0 _________________________________________________________________ dense_39 (Dense) (None, 10) 40970 _________________________________________________________________ batch_normalization_113 (Bat (None, 10) 40 _________________________________________________________________ activation_39 (Activation) (None, 10) 0 _________________________________________________________________ dropout_23 (Dropout) (None, 10) 0 ================================================================= Total params: 160,503,154 Trainable params: 160,478,302 Non-trainable params: 24,852 _________________________________________________________________
In [58]:
alexnet_model.compile(loss=keras.losses.categorical_crossentropy, optimizer='adam', metrics=["accuracy"])

alexnet_inference_time = []

# dummy tensor to check the inference time of each network
x_test = np.random.rand(batch_size, input_data_shape[0], input_data_shape[1], input_data_shape[2])

for _ in range(num_of_training_iteration):
    alexnet_inference_start = datetime.now()
    alexnet_inference = alexnet_model.predict_classes(x_test)
    alexnet_inference_finish = datetime.now()
    alexnet_inference_time.append(alexnet_inference_finish - alexnet_inference_start)
print("Average Inference time for AlexNet: {}".format(np.mean(alexnet_inference_time)))

Average Inference time for AlexNet: 0:00:00.243962
In [63]:
vgg16_model.compile(loss=keras.losses.categorical_crossentropy, optimizer='adam', metrics=["accuracy"])
vgg16_inference_time = []
for _ in range(num_of_training_iteration):
    vgg16_inference_start = datetime.now()
    vgg16_inference = vgg16_model.predict_classes(x_test)
    vgg16_inference_finish = datetime.now()
    vgg16_inference_time.append(vgg16_inference_finish - vgg16_inference_start)

print("Average Inference time for VGG-16: {}".format(np.mean(vgg16_inference_time)))
Average Inference time for VGG-16: 0:00:01.471458
In [64]:
Cnn_model.compile(loss=keras.losses.categorical_crossentropy, optimizer='adam', metrics=["accuracy"])

Cnn_inference_time = []

# dummy tensor to check the inference time of each network
x_test = np.random.rand(batch_size, input_data_shape[0], input_data_shape[1], input_data_shape[2])

for _ in range(num_of_training_iteration):
    Cnn_inference_start = datetime.now()
    Cnn_inference = Cnn_model.predict_classes(x_test)
    Cnn_inference_finish = datetime.now()
    Cnn_inference_time.append(Cnn_inference_finish - Cnn_inference_start)
print("Average Inference time for Cnn: {}".format(np.mean(Cnn_inference_time)))

Average Inference time for Cnn: 0:00:02.105015
In [ ]:
jovian.commit()
In [2]:
jovian.log_metrics({
    'AlexNet_parameters':  24019722,
    'AlexNet_Inference_Time': '0:00:00.243962',
    'vgg_parameters': 134351218,
    'vgg_inference_Time': '0:00:01.471458',
    'Cnn_parameters': 160503154,
    'Cnn_Inference_Time': '0:00:02.105015'
})
[jovian] Metrics logged.
In [ ]:
jovian.commit()
[jovian] Saving notebook..
In [ ]: