Jovian
⭐️
Sign In
In [1]:
import keras
import jovian
Using TensorFlow backend.
In [2]:
import numpy as np

from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten, Add, BatchNormalization
from keras.layers import Convolution2D, MaxPooling2D, SeparableConv2D
from keras.utils import np_utils

from keras.datasets import mnist

from pprint import pprint
In [3]:
# Download (first run, ~11 MB) and load the MNIST train/test split:
# X_* are uint8 image arrays of shape (n, 28, 28), y_* are integer labels 0-9.
(X_train, y_train), (X_test, y_test) = mnist.load_data()
In [4]:
# Sanity check: confirm the dataset shape and eyeball one sample digit.
print (X_train.shape)
from matplotlib import pyplot as plt
%matplotlib inline
plt.imshow(X_train[6], cmap='gray')
(60000, 28, 28)
Out[4]:
<matplotlib.image.AxesImage at 0x7f9d0900beb8>
Notebook Image
In [5]:
# Append a trailing channel axis so the images are NHWC-shaped (n, 28, 28, 1),
# as expected by Keras 2D convolution layers with channels_last.
X_train = X_train.reshape(-1, 28, 28, 1)
X_test = X_test.reshape(-1, 28, 28, 1)
In [6]:
# Convert pixel intensities to float32 and scale them from [0, 255] to [0, 1].
X_train = X_train.astype('float32') / 255
X_test = X_test.astype('float32') / 255
In [7]:
# Convert the 1-dimensional integer label arrays into 10-wide one-hot matrices
# (required by the categorical_crossentropy loss used below).
num_classes = 10
Y_train = np_utils.to_categorical(y_train, num_classes)
Y_test = np_utils.to_categorical(y_test, num_classes)
In [8]:
# Small fully-convolutional MNIST classifier (~15.3k params): no pooling;
# spatial size is reduced purely by valid convolutions down to 1x1x10.
sfs = Sequential()

# Keras 2 API: pass kernel_size as a tuple. The old positional form
# `Convolution2D(16, 3, 3)` is deprecated and emits UserWarnings.
sfs.add(Convolution2D(16, (3, 3), activation='relu', input_shape=(28, 28, 1)))  # -> 26x26x16
sfs.add(SeparableConv2D(filters=32, kernel_size=(3, 3), activation='relu'))     # -> 24x24x32
sfs.add(SeparableConv2D(filters=64, kernel_size=(3, 3), activation='relu'))     # -> 22x22x64
sfs.add(Convolution2D(32, (1, 1), activation='relu'))                           # 1x1 channel bottleneck -> 22x22x32
sfs.add(SeparableConv2D(filters=32, kernel_size=(13, 13), activation='relu'))   # -> 10x10x32
sfs.add(Dropout(0.1))
# NOTE(review): relu on this final 10-channel layer zeroes negative logits
# before softmax, which can flatten the output distribution — consider
# activation=None here and letting softmax see raw logits.
sfs.add(SeparableConv2D(filters=10, kernel_size=(10, 10), activation='relu'))   # -> 1x1x10
sfs.add(Flatten())                                                              # -> (10,)
sfs.add(Activation('softmax'))
WARNING: Logging before flag parsing goes to stderr. W0731 11:06:38.101219 140314462373696 deprecation_wrapper.py:119] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:74: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead. /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/ipykernel_launcher.py:4: UserWarning: Update your `Conv2D` call to the Keras 2 API: `Conv2D(16, (3, 3), activation="relu", input_shape=(28, 28, 1...)` after removing the cwd from sys.path. W0731 11:06:38.119383 140314462373696 deprecation_wrapper.py:119] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:517: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead. W0731 11:06:38.122604 140314462373696 deprecation_wrapper.py:119] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:4138: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead. /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/ipykernel_launcher.py:7: UserWarning: Update your `Conv2D` call to the Keras 2 API: `Conv2D(32, (1, 1), activation="relu")` import sys W0731 11:06:38.204623 140314462373696 deprecation_wrapper.py:119] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:133: The name tf.placeholder_with_default is deprecated. Please use tf.compat.v1.placeholder_with_default instead. W0731 11:06:38.212360 140314462373696 deprecation.py:506] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:3445: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version. Instructions for updating: Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.
In [9]:
# Print per-layer output shapes and parameter counts
# (output below shows 15,322 trainable parameters total).
sfs.summary()
_________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d_1 (Conv2D) (None, 26, 26, 16) 160 _________________________________________________________________ separable_conv2d_1 (Separabl (None, 24, 24, 32) 688 _________________________________________________________________ separable_conv2d_2 (Separabl (None, 22, 22, 64) 2400 _________________________________________________________________ conv2d_2 (Conv2D) (None, 22, 22, 32) 2080 _________________________________________________________________ separable_conv2d_3 (Separabl (None, 10, 10, 32) 6464 _________________________________________________________________ dropout_1 (Dropout) (None, 10, 10, 32) 0 _________________________________________________________________ separable_conv2d_4 (Separabl (None, 1, 1, 10) 3530 _________________________________________________________________ flatten_1 (Flatten) (None, 10) 0 _________________________________________________________________ activation_1 (Activation) (None, 10) 0 ================================================================= Total params: 15,322 Trainable params: 15,322 Non-trainable params: 0 _________________________________________________________________
In [10]:
from keras.backend import get_value
from keras.callbacks import Callback
from jovian import log_hyperparams, log_metrics


class KerasCallback(Callback):
    """Keras Callback that logs hyperparameters and metrics to Jovian
    during model training.

    Arguments:
        arch_name: A name for the architecture that you're using.
    """

    def __init__(self, arch_name):
        # Keras Callback subclasses should chain to the base initializer,
        # which sets up the state Keras later populates (model, params).
        super().__init__()
        self.arch_name = arch_name

    def on_train_begin(self, logs=None):
        """Log the run's hyperparameters once, when training starts."""
        hyp_dict = {
            'arch_name': self.arch_name,
            'epochs': self.params['epochs'],
            'batch_size': self.params['batch_size'],
            'loss_func': self.model.loss,
            # "<class 'keras.optimizers.Adam'>" -> "keras.optimizers.Adam"
            'opt_func': str(self.model.optimizer.__class__).split("'")[1],
            'weight_decay': self.model.optimizer.initial_decay,
            'learning_rate': str(get_value(self.model.optimizer.lr))
        }
        log_hyperparams(hyp_dict)

    def on_epoch_end(self, epoch, logs=None):
        """Log the epoch number plus each training metric rounded to 4 dp.

        Builds a fresh dict rather than rounding `logs` in place:
        mutating `logs` would also alter the values Keras records in
        its own History object for this epoch.
        """
        met_dict = {'epoch': epoch}
        for key, value in (logs or {}).items():
            met_dict[key] = round(value, 4)
        log_metrics(met_dict)
        
In [11]:
# Configure training: softmax cross-entropy loss with the Adam optimizer,
# tracking accuracy and mean absolute error as metrics.
sfs.compile(
    loss='categorical_crossentropy',
    optimizer='adam',
    metrics=['accuracy', 'mae'],
)
W0731 11:06:38.277813 140314462373696 deprecation_wrapper.py:119] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/keras/optimizers.py:790: The name tf.train.Optimizer is deprecated. Please use tf.compat.v1.train.Optimizer instead. W0731 11:06:38.300105 140314462373696 deprecation_wrapper.py:119] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:3295: The name tf.log is deprecated. Please use tf.math.log instead.
In [12]:
# Jovian logging callback; 'custom1' tags this architecture in the logged runs.
jvn_cb = KerasCallback('custom1')
In [13]:
# Train for 2 epochs; validation metrics are computed on the held-out test
# split and the Jovian callback logs hyperparameters + per-epoch metrics.
sfs.fit(X_train,
        Y_train,
        batch_size=256,
        epochs=2,
        verbose=1,
        validation_data=(X_test, Y_test),
        callbacks=[jvn_cb])
W0731 11:06:38.402207 140314462373696 deprecation.py:323] From /home/siddhant/anaconda3/envs/keras/lib/python3.6/site-packages/tensorflow/python/ops/math_grad.py:1250: add_dispatch_support.<locals>.wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version. Instructions for updating: Use tf.where in 2.0, which has the same broadcast rule as np.where
Train on 60000 samples, validate on 10000 samples [jovian] Hypermaters logged. Epoch 1/2 60000/60000 [==============================] - 14s 230us/step - loss: 1.1936 - acc: 0.6297 - mean_absolute_error: 0.0977 - val_loss: 0.4086 - val_acc: 0.9238 - val_mean_absolute_error: 0.0347 [jovian] Metrics logged. Epoch 2/2 60000/60000 [==============================] - 11s 188us/step - loss: 0.2735 - acc: 0.9455 - mean_absolute_error: 0.0237 - val_loss: 0.1044 - val_acc: 0.9689 - val_mean_absolute_error: 0.0100 [jovian] Metrics logged.
Out[13]:
<keras.callbacks.History at 0x7f9d0aac6358>
In [ ]:
# Snapshot this notebook (and its logged hyperparams/metrics) to jovian.ml.
jovian.commit(nb_filename='keras_callback.ipynb')
[jovian] Saving notebook..
In [ ]:
 
In [ ]: