Loading the MNIST Dataset in Keras

In [11]:
from keras.datasets import mnist
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
In [12]:
print(train_images.shape)
print(len(train_labels))
print(test_images.shape)
print(len(test_labels))
(60000, 28, 28)
60000
(10000, 28, 28)
10000
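
Each image is a 28x28 array of grayscale pixel values in the range 0-255. As a quick sanity check, one of the digits can be displayed with matplotlib (a minimal sketch, assuming matplotlib is available):

import matplotlib.pyplot as plt

# Display the first training image together with its integer label
plt.imshow(train_images[0], cmap='gray')
plt.title('Label: %d' % train_labels[0])
plt.show()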

Network Architecture

In [13]:
from keras import models
from keras import layers

network = models.Sequential()
network.add(layers.Dense(512, activation='relu', input_shape=(28*28,)))
network.add(layers.Dense(10, activation='softmax'))
In [14]:
network.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_3 (Dense)              (None, 512)               401920    
_________________________________________________________________
dense_4 (Dense)              (None, 10)                5130      
=================================================================
Total params: 407,050
Trainable params: 407,050
Non-trainable params: 0
_________________________________________________________________
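
The parameter counts follow directly from the layer shapes: a Dense layer with n inputs and m units has n*m weights plus m biases.

# First Dense layer:  784 * 512 + 512 = 401,920 parameters
# Second Dense layer: 512 * 10  + 10  = 5,130 parameters
print(28*28*512 + 512)   # 401920
print(512*10 + 10)       # 5130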

Compilation Step

In [15]:
network.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
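
As an aside, Keras also provides sparse_categorical_crossentropy, which works directly with integer labels; compiling with it (a sketch, not what this notebook uses) would make the to_categorical step below unnecessary:

# Alternative compile call for integer labels (sketch only, not run here)
# network.compile(optimizer='rmsprop',
#                 loss='sparse_categorical_crossentropy',
#                 metrics=['accuracy'])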

Preparing Image Data

In [16]:
# Flatten each 28x28 image into a 784-dimensional vector and scale pixel values to [0, 1]

train_images = train_images.reshape((60000, 28*28))
test_images = test_images.reshape((10000, 28*28))

train_images = train_images.astype('float32') / 255
test_images = test_images.astype('float32') / 255
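
A quick check confirms the prepared data: each image is now a flat vector of 784 float32 values between 0 and 1.

# Sanity check on the reshaped, rescaled image data
print(train_images.shape)                       # (60000, 784)
print(train_images.dtype)                       # float32
print(train_images.min(), train_images.max())   # 0.0 1.0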

Preparing Labels

In [17]:
from keras.utils import to_categorical

# One-hot encode the labels (each integer 0-9 becomes a length-10 vector)

train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
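
to_categorical turns each integer label into a length-10 one-hot vector, matching the 10-way softmax output of the network.

# Each label is now a one-hot vector of length 10
print(train_labels.shape)   # (60000, 10)
print(train_labels[0])      # first training digit is a 5 -> [0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]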

Training the Network

In [18]:
network.fit(train_images, train_labels, epochs=5, batch_size=128)
Epoch 1/5
60000/60000 [==============================] - 13s 223us/step - loss: 0.2537 - acc: 0.9263
Epoch 2/5
60000/60000 [==============================] - 6s 94us/step - loss: 0.1037 - acc: 0.9694
Epoch 3/5
60000/60000 [==============================] - 6s 96us/step - loss: 0.0673 - acc: 0.9798
Epoch 4/5
60000/60000 [==============================] - 6s 94us/step - loss: 0.0486 - acc: 0.9853
Epoch 5/5
60000/60000 [==============================] - 6s 94us/step - loss: 0.0363 - acc: 0.9892
Out[18]:
<keras.callbacks.History at 0x6ceea02978>
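
fit returns a History object whose history dictionary records the per-epoch metrics. A variant of the call above (a sketch, not run here) could also hold out part of the training data to monitor validation accuracy:

# Sketch: reserve 20% of the training data for validation
history = network.fit(train_images, train_labels,
                      epochs=5, batch_size=128,
                      validation_split=0.2)
print(history.history.keys())   # e.g. ['loss', 'acc', 'val_loss', 'val_acc']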

Evaluating on the Test Data

In [19]:
test_loss, test_acc = network.evaluate(test_images, test_labels)
10000/10000 [==============================] - 2s 217us/step
In [20]:
print(test_loss)
print(test_acc)
0.06720741260098294
0.9802
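
Beyond the aggregate accuracy, network.predict returns the 10 softmax probabilities for each image; the predicted digit is the index with the highest probability (a short sketch):

import numpy as np

# Predict class probabilities for the first five test images
probs = network.predict(test_images[:5])
print(probs.shape)                          # (5, 10)
print(np.argmax(probs, axis=1))             # predicted digits
print(np.argmax(test_labels[:5], axis=1))   # true digits (decoded from one-hot)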