Note: This is a generated markdown export of the Jupyter notebook file classification_neural_net.ipynb. You can also view the notebook with Jupyter's nbviewer.
Classification with a neural network
%matplotlib inline
import tensorflow as tf
import seaborn as sns
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import datasets, metrics, model_selection
digits = datasets.load_digits()

fig, axes = plt.subplots(nrows=1, ncols=10, figsize=(10, 3))
for ax, image, label in zip(axes, digits.images, digits.target):
    ax.set_axis_off()
    ax.imshow(image, cmap=plt.cm.gray_r)
    ax.set_title('%i' % label)

plt.figure()
plt.imshow(digits.images[0], cmap=plt.cm.gray_r)
plt.colorbar()
plt.grid(False)
plt.show()
target = digits.target
data = digits.images

print("min value: {}".format(np.amin(data)))
print("max value: {}".format(np.amax(data)))
print("shape: {}".format(np.shape(data)))
min value: 0.0
max value: 16.0
shape: (1797, 8, 8)
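As an aside, scikit-learn also exposes the same pixels pre-flattened as digits.data with shape (1797, 64); a minimal sketch (the variable name flat is illustrative) showing that it matches the reshaped image array:

flat = digits.images.reshape(len(digits.images), -1)  # flatten each 8x8 image to 64 features
print(np.array_equal(flat, digits.data))              # True: digits.data is the flattened view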
X_train, X_test, y_train, y_test = model_selection.train_test_split(
    data, target, test_size=0.5)

X_train = X_train.astype('float32') / 16.
X_test = X_test.astype('float32') / 16.
df_train = pd.DataFrame(y_train, columns=['target'])
df_train['type'] = 'train'
df_test = pd.DataFrame(y_test, columns=['target'])
df_test['type'] = 'test'
df_set = pd.concat([df_train, df_test])  # DataFrame.append is deprecated; concatenate instead

_ = sns.countplot(x='target', hue='type', data=df_set)
print('train samples:', len(X_train))
print('test samples:', len(X_test))
train samples: 898
test samples: 899
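The countplot above shows how the classes are distributed across the two splits. If a more even per-class balance is wanted, train_test_split can stratify on the labels; a small optional variation, not used in this notebook:

X_train, X_test, y_train, y_test = model_selection.train_test_split(
    data, target, test_size=0.5, stratify=target, random_state=42)  # keep class proportions equal in both splits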
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(8, 8)),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dense(10)
])

model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])

model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
flatten (Flatten) (None, 64) 0
_________________________________________________________________
dense (Dense) (None, 128) 8320
_________________________________________________________________
dense_1 (Dense) (None, 10) 1290
=================================================================
Total params: 9,610
Trainable params: 9,610
Non-trainable params: 0
_________________________________________________________________
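As a quick sanity check on the summary, the parameter count of a dense layer is inputs times units plus one bias per unit; a short sketch of the arithmetic:

# Dense layer parameters = inputs * units + units (one bias per unit)
print(64 * 128 + 128)   # 8320 parameters in the first Dense layer
print(128 * 10 + 10)    # 1290 parameters in the output layer
print(8320 + 1290)      # 9610 trainable parameters in total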
%%time
history = model.fit(X_train, y_train, epochs=100, validation_split=0.2, verbose=0)
CPU times: user 6.55 s, sys: 822 ms, total: 7.37 s
Wall time: 5.77 s
hist = pd.DataFrame(history.history)
hist['epoch'] = history.epoch
hist.tail()
        loss  accuracy  val_loss  val_accuracy  epoch
95  0.007926       1.0  0.097244      0.977778     95
96  0.007718       1.0  0.097566      0.972222     96
97  0.007589       1.0  0.098171      0.977778     97
98  0.007448       1.0  0.098217      0.977778     98
99  0.007221       1.0  0.098246      0.977778     99
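The validation loss flattens out well before epoch 100 while the training loss keeps shrinking, so the run could also be stopped automatically. A hedged sketch, not part of the original notebook, using Keras' EarlyStopping callback:

# Stop once val_loss has not improved for 10 epochs and keep the best weights
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=10,
                                              restore_best_weights=True)
history = model.fit(X_train, y_train, epochs=100, validation_split=0.2,
                    verbose=0, callbacks=[early_stop])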
def plot_loss(history):
    plt.plot(history.history['loss'], label='loss')
    plt.plot(history.history['val_loss'], label='val_loss')
    plt.xlabel('Epoch')
    plt.ylabel('Error')
    plt.legend()
    plt.grid(True)

plot_loss(history)
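The recorded history also contains the accuracy metrics, so the accuracy curves can be plotted the same way; an analogous sketch:

def plot_accuracy(history):
    plt.plot(history.history['accuracy'], label='accuracy')
    plt.plot(history.history['val_accuracy'], label='val_accuracy')
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.legend()
    plt.grid(True)

plot_accuracy(history)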
test_loss, test_acc = model.evaluate(X_test, y_test, verbose=0)

print('Test loss:', test_loss)
print('Test accuracy:', test_acc)
Test loss: 0.11751686781644821
Test accuracy: 0.9688543081283569
probability_model = tf.keras.Sequential([model,
                                         tf.keras.layers.Softmax()])
predicted = [np.argmax(x) for x in probability_model.predict(X_test)]

confusion_matrix = pd.DataFrame(metrics.confusion_matrix(y_test, predicted))
confusion_matrix
     0   1   2   3    4   5   6   7   8   9
0   80   0   0   0    2   0   1   0   0   0
1    0  83   0   0    0   0   2   0   1   0
2    0   1  78   0    0   0   0   0   0   0
3    0   0   0  92    0   1   0   0   0   0
4    0   0   0   0  105   0   0   0   1   0
5    0   1   0   0    0  90   1   0   0   2
6    0   1   0   0    0   1  86   0   0   0
7    0   0   0   0    1   0   0  83   0   0
8    0   8   0   1    0   0   0   0  79   0
9    0   1   0   0    0   1   0   1   0  95
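Because the model emits raw logits, the appended Softmax layer turns each prediction into class probabilities; the argmax is the same either way since softmax is monotonic. A small sketch for inspecting a single test sample (indexing choice is illustrative):

probs = probability_model.predict(X_test[:1])      # probabilities for the first test image
print(probs.round(3))                              # ten values that sum to 1
print('predicted class:', np.argmax(probs[0]))
print('true class:', y_test[0])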
_ = sns.heatmap(confusion_matrix, annot=True, cmap="Blues")

print("accuracy: {:.3f}".format(metrics.accuracy_score(y_test, predicted)))
print("precision: {:.3f}".format(metrics.precision_score(y_test, predicted, average='weighted')))
print("recall: {:.3f}".format(metrics.recall_score(y_test, predicted, average='weighted')))
print("f1 score: {:.3f}".format(metrics.f1_score(y_test, predicted, average='weighted')))
accuracy: 0.969
precision: 0.970
recall: 0.969
f1 score: 0.969
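The weighted averages above summarize the whole test set. For per-class precision, recall and F1 (for example to quantify how often 8 is confused with 1 in the matrix above), scikit-learn's classification_report prints them in one call; a possible addition, not part of the original notebook:

print(metrics.classification_report(y_test, predicted))  # per-class precision, recall, f1 and support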