# Standard library.
import os
import sys
import time

# Third-party.
import tensorflow as tf
from tensorflow import keras
import matplotlib as mpl
from matplotlib import pyplot as plt
import sklearn
import numpy as np
import pandas as pd

# FIX: "% matplotlib inline" is an IPython line magic -- a syntax error in a
# plain .py file. Run it only when an IPython kernel is actually present.
try:
    get_ipython().run_line_magic("matplotlib", "inline")
except NameError:
    pass  # not running under IPython/Jupyter; nothing to do

print(np.__version__)
1.16.4
# Load Fashion-MNIST and carve a validation split off the front of the
# training set: the first 5,000 examples validate, the rest train.
VALIDATION_SIZE = 5000

fashion_mnist = keras.datasets.fashion_mnist
(x_train_all, y_train_all), (x_test, y_test) = fashion_mnist.load_data()

x_valid, y_valid = x_train_all[:VALIDATION_SIZE], y_train_all[:VALIDATION_SIZE]
x_train, y_train = x_train_all[VALIDATION_SIZE:], y_train_all[VALIDATION_SIZE:]

# Sanity-check the shape of each split (images, labels).
for xs, ys in ((x_valid, y_valid), (x_train, y_train), (x_test, y_test)):
    print(xs.shape, ys.shape)
(5000, 28, 28) (5000,)
(55000, 28, 28) (55000,)
(10000, 28, 28) (10000,)
from sklearn.preprocessing import StandardScaler

# Standardize pixel values to zero mean / unit variance. The statistics are
# fitted on the training set only and then reused for validation and test,
# so no information leaks from the held-out splits.
scaler = StandardScaler()


def _standardize(images, fit=False):
    # Flatten to a single column so one global mean/std is computed over
    # every pixel, then restore the 28x28 image shape.
    flat = images.astype(np.float32).reshape(-1, 1)
    scaled = scaler.fit_transform(flat) if fit else scaler.transform(flat)
    return scaled.reshape(-1, 28, 28)


x_train_scaled = _standardize(x_train, fit=True)
x_valid_scaled = _standardize(x_valid)
x_test_scaled = _standardize(x_test)
# Classifier: flatten 28x28 images to 784 features, then two ReLU hidden
# layers (300, 100 units) and a 10-way softmax over the clothing classes.
model = keras.models.Sequential([
    keras.layers.Flatten(input_shape=(28, 28)),
    keras.layers.Dense(300, activation="relu"),
    keras.layers.Dense(100, activation="relu"),
    keras.layers.Dense(10, activation="softmax"),
])

# Labels are integer class ids, hence the sparse cross-entropy loss;
# plain SGD optimizer, tracking accuracy during training.
model.compile(
    loss="sparse_categorical_crossentropy",
    optimizer="sgd",
    metrics=["accuracy"],
)
# Training callbacks: TensorBoard logging, best-model checkpointing, and
# early stopping.
# FIXES vs. original: (1) "1e - 3" was a syntax error -- "1e" is an invalid
# float literal; the intended value is 1e-3. (2) exists()+mkdir() is
# race-prone and fails when parent dirs are missing; makedirs(exist_ok=True)
# handles both. (3) dropped the duplicate "import os" (already imported at
# the top of the file).
logdir = './callbacks'
os.makedirs(logdir, exist_ok=True)
output_model_file = os.path.join(logdir, "fashion_mnist_model.h5")

callbacks = [
    # Write loss/metric curves for TensorBoard.
    keras.callbacks.TensorBoard(logdir),
    # Save only the best model (lowest validation loss) seen so far.
    keras.callbacks.ModelCheckpoint(
        output_model_file,
        save_best_only=True,
    ),
    # Stop when val_loss fails to improve by at least 1e-3 for 5 epochs.
    keras.callbacks.EarlyStopping(
        patience=5,
        min_delta=1e-3,
    ),
]
# Train for up to 10 epochs on the standardized training images, evaluating
# the held-out validation split after each epoch; `callbacks` (defined
# above) provides TensorBoard logging, checkpointing, and early stopping.
# `history.history` keeps the per-epoch loss/accuracy for later inspection.
history = model. fit(
x_train_scaled,
y_train,
epochs= 10 ,
validation_data= ( x_valid_scaled, y_valid) ,
callbacks= callbacks
)
Train on 55000 samples, validate on 5000 samples
Epoch 1/10
55000/55000 [==============================] - 4s 70us/sample - loss: 0.9230 - accuracy: 0.6990 - val_loss: 0.6209 - val_accuracy: 0.7898
Epoch 2/10
55000/55000 [==============================] - 4s 65us/sample - loss: 0.5812 - accuracy: 0.7980 - val_loss: 0.5220 - val_accuracy: 0.8184
Epoch 3/10
55000/55000 [==============================] - 4s 65us/sample - loss: 0.5148 - accuracy: 0.8186 - val_loss: 0.4777 - val_accuracy: 0.8352
Epoch 4/10
55000/55000 [==============================] - 3s 63us/sample - loss: 0.4780 - accuracy: 0.8311 - val_loss: 0.4546 - val_accuracy: 0.8420
Epoch 5/10
55000/55000 [==============================] - 4s 64us/sample - loss: 0.4537 - accuracy: 0.8399 - val_loss: 0.4348 - val_accuracy: 0.8502
Epoch 6/10
55000/55000 [==============================] - 4s 64us/sample - loss: 0.4357 - accuracy: 0.8460 - val_loss: 0.4250 - val_accuracy: 0.8538
Epoch 7/10
55000/55000 [==============================] - 4s 64us/sample - loss: 0.4213 - accuracy: 0.8507 - val_loss: 0.4118 - val_accuracy: 0.8578
Epoch 8/10
55000/55000 [==============================] - 4s 64us/sample - loss: 0.4095 - accuracy: 0.8550 - val_loss: 0.4027 - val_accuracy: 0.8618
Epoch 9/10
55000/55000 [==============================] - 4s 67us/sample - loss: 0.3993 - accuracy: 0.8583 - val_loss: 0.4005 - val_accuracy: 0.8582
Epoch 10/10
55000/55000 [==============================] - 4s 64us/sample - loss: 0.3907 - accuracy: 0.8614 - val_loss: 0.3871 - val_accuracy: 0.8638