### 2.1 Hyperparameters¶

In [7]:
# Hyperparameters
training_epochs = 30 # Total number of training epochs
learning_rate = 0.01 # The learning rate


### 2.2 Creating a model¶

In [8]:
# create a model
def create_model():
model = Sequential()
# Input layer
kernel_regularizer= tf.keras.regularizers.l2(0.01),activation='tanh'))
# Output layer

# Compile a model
model.compile(loss='categorical_crossentropy',
return model
model = create_model()
model.summary()

Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense (Dense)                (None, 64)                4160
_________________________________________________________________
dense_1 (Dense)              (None, 10)                650
=================================================================
Total params: 4,810
Trainable params: 4,810
Non-trainable params: 0
_________________________________________________________________


### 2.3 Train the model¶

In [9]:
results = model.fit(
X_train, y_train,
epochs= training_epochs,
batch_size = 516,
validation_data = (X_test, y_test),
verbose = 0
)


### 2.4 Test the model¶

Model can generate output predictions for the input samples.

In [10]:
prediction_values = model.predict_classes(X_test)


### 2.5 Evaluate the model to see the accuracy¶

In [11]:
print("Evaluating on training set...")
(loss, accuracy) = model.evaluate(X_train,y_train, verbose=0)
print("loss={:.4f}, accuracy: {:.4f}%".format(loss,accuracy * 100))

print("Evaluating on testing set...")
(loss, accuracy) = model.evaluate(X_test, y_test, verbose=0)
print("loss={:.4f}, accuracy: {:.4f}%".format(loss,accuracy * 100))

Evaluating on training set...
loss=0.1054, accuracy: 99.8338%
Evaluating on testing set...
loss=0.1842, accuracy: 97.1380%


### 2.6 Summarize history for accuracy¶

In [12]:
# summarize history for accuracy
plt.plot(results.history['accuracy'])
plt.plot(results.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'])

Out[12]:
<matplotlib.legend.Legend at 0x221922776d8>

### 2.7 Summarize history for loss¶

In [13]:
plt.plot(results.history['loss'])
plt.plot(results.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'])

max_loss = np.max(results.history['loss'])
min_loss = np.min(results.history['loss'])
print("Maximum Loss : {:.4f}".format(max_loss))
print("")
print("Minimum Loss : {:.4f}".format(min_loss))
print("")
print("Loss difference : {:.4f}".format((max_loss - min_loss)))

Maximum Loss : 2.9333

Minimum Loss : 0.1097

Loss difference : 2.8236