Sequential and Functional Interfaces

In [1]:
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
In [2]:
import warnings

# h5py can emit a FutureWarning at import time with some numpy versions.
# Silence it only for the duration of the import using a context manager;
# the original resetwarnings() call would also have discarded any filters
# configured outside this cell (e.g. via -W or PYTHONWARNINGS).
with warnings.catch_warnings():
    warnings.simplefilter(action='ignore', category=FutureWarning)
    import h5py

# Keep the rest of the notebook quiet about known-noisy warning categories.
warnings.simplefilter(action='ignore', category=ImportWarning)
warnings.simplefilter(action='ignore', category=RuntimeWarning)
warnings.simplefilter(action='ignore', category=DeprecationWarning)
warnings.simplefilter(action='ignore', category=ResourceWarning)
In [3]:
from keras import models
from keras import layers
from keras import optimizers
Using TensorFlow backend.
In [4]:
from keras.utils import to_categorical
from keras.datasets import mnist
In [5]:
(X_train, y_train), (X_test, y_test) = mnist.load_data()
In [6]:
# Flatten each image to a 784-vector and rescale pixel values from
# [0, 255] to [0, 1]; one-hot encode the labels so they match the
# categorical-crossentropy loss used at compile time.
X_train = X_train.reshape((-1, 28*28)).astype('float32') / 255
X_test = X_test.reshape((-1, 28*28)).astype('float32') / 255
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)

Sequential Model

In [7]:
# Two-layer MLP built with the Sequential API, layers given as a list:
# 784 inputs -> 32 ReLU units -> 10-way softmax over the digit classes.
model = models.Sequential([
    layers.Dense(32, activation='relu', input_shape=(28*28,)),
    layers.Dense(10, activation='softmax'),
])
In [8]:
# RMSprop with default hyperparameters; categorical crossentropy matches
# the one-hot labels, and accuracy is reported during training.
model.compile(
    optimizer='rmsprop',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
In [9]:
# Train for 10 epochs on the full training set in mini-batches of 128.
# No validation split here — generalization is measured on the test set
# in the evaluation cell below.
model.fit(X_train, y_train,
          batch_size=128, epochs=10)
Epoch 1/10
60000/60000 [==============================] - 7s 113us/step - loss: 0.4663 - acc: 0.8763
Epoch 2/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.2368 - acc: 0.9319
Epoch 3/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1959 - acc: 0.9429
Epoch 4/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1701 - acc: 0.9505
Epoch 5/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1519 - acc: 0.9554
Epoch 6/10
60000/60000 [==============================] - 1s 23us/step - loss: 0.1380 - acc: 0.9591
Epoch 7/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1265 - acc: 0.9634
Epoch 8/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1175 - acc: 0.9660
Epoch 9/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1104 - acc: 0.9675
Epoch 10/10
60000/60000 [==============================] - 1s 21us/step - loss: 0.1038 - acc: 0.9688
Out[9]:
<keras.callbacks.History at 0x7f2cc7c0e630>
In [10]:
test_loss, test_acc = model.evaluate(X_test, y_test)
10000/10000 [==============================] - 0s 38us/step
In [11]:
test_loss, test_acc
Out[11]:
(0.12339150645062327, 0.9629)

Functional Model

In [12]:
# The same two-layer MLP built with the functional API: an explicit Input
# tensor is threaded through the layers and Model wraps the two endpoints.
# Use 28*28 (not a hard-coded 784) for consistency with the reshape and
# Sequential cells above.
input_tensor = layers.Input(shape=(28*28,))
x = layers.Dense(32, activation='relu')(input_tensor)
output_tensor = layers.Dense(10, activation='softmax')(x)
model = models.Model(inputs=input_tensor, outputs=output_tensor)
In [13]:
# Identical training configuration to the sequential version: RMSprop,
# categorical crossentropy for the one-hot labels, accuracy as the metric.
model.compile(
    optimizer='rmsprop',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
In [14]:
# Same training regimen as the sequential model: 10 epochs, batch size 128,
# no validation split; the test set below serves as the held-out check.
model.fit(X_train, y_train,
          batch_size=128, epochs=10)
Epoch 1/10
60000/60000 [==============================] - 1s 23us/step - loss: 0.4438 - acc: 0.8815
Epoch 2/10
60000/60000 [==============================] - 1s 21us/step - loss: 0.2417 - acc: 0.9304
Epoch 3/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1989 - acc: 0.9432
Epoch 4/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1719 - acc: 0.9506
Epoch 5/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1527 - acc: 0.9558
Epoch 6/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1384 - acc: 0.9600
Epoch 7/10
60000/60000 [==============================] - 1s 21us/step - loss: 0.1272 - acc: 0.9631
Epoch 8/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1174 - acc: 0.9655
Epoch 9/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1104 - acc: 0.9677
Epoch 10/10
60000/60000 [==============================] - 1s 22us/step - loss: 0.1034 - acc: 0.9699
Out[14]:
<keras.callbacks.History at 0x7f2cc585a898>
In [15]:
test_loss, test_acc = model.evaluate(X_test, y_test)
10000/10000 [==============================] - 0s 40us/step
In [16]:
test_loss, test_acc
Out[16]:
(0.12404100528806448, 0.964)