MNIST.ipynb - Colab

import tensorflow as tf

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from tensorflow.keras.datasets import mnist

(x_train, y_train), (x_test, y_test) = mnist.load_data()  # standard MNIST train/test split

Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-dataset
11490434/11490434 [==============================] - 0s 0us/step
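
A quick shape check right after loading is an optional sanity check (not in the original notebook) confirming the standard split sizes:

# The standard MNIST split: 60,000 training and 10,000 test images of 28x28 pixels
print(x_train.shape, y_train.shape)  # (60000, 28, 28) (60000,)
print(x_test.shape, y_test.shape)    # (10000, 28, 28) (10000,)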

x_train = x_train/255.0  # scale pixel values from [0, 255] to [0, 1]

y_train = tf.keras.utils.to_categorical(y_train)  # one-hot encode the 10 digit classes
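
To see what to_categorical produces, here is a small illustration (not in the original notebook): an integer label such as 5 becomes a length-10 vector with a 1 in position 5. Note that y_test is left as integer labels, so it would need the same encoding before any call to model.evaluate.

# Example: the integer label 5 becomes a one-hot vector of length 10
tf.keras.utils.to_categorical(5, num_classes=10)
# array([0., 0., 0., 0., 0., 1., 0., 0., 0., 0.], dtype=float32)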

model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Input(shape=(28, 28)))  # 28x28 grayscale image input
model.add(tf.keras.layers.Flatten())              # flatten each image to a 784-vector
model.add(tf.keras.layers.Dense(units=128, activation=tf.keras.activations.relu))
model.add(tf.keras.layers.Dense(units=16, activation=tf.keras.activations.relu))
model.add(tf.keras.layers.Dense(10, activation=tf.keras.activations.softmax))  # class probabilities

model.summary()

Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #
=================================================================
 flatten (Flatten)           (None, 784)               0

 dense (Dense)               (None, 128)               100480

 dense_1 (Dense)             (None, 16)                2064

 dense_2 (Dense)             (None, 10)                170

=================================================================
Total params: 102714 (401.23 KB)
Trainable params: 102714 (401.23 KB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
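
The parameter counts in the summary follow directly from each Dense layer's kernel plus bias; a quick arithmetic check:

# Dense layer parameters = (inputs x units) + units (bias)
784 * 128 + 128   # 100480  (dense)
128 * 16 + 16     # 2064    (dense_1)
16 * 10 + 10      # 170     (dense_2)
# 100480 + 2064 + 170 = 102714 total, matching the summary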

model.compile(optimizer=tf.keras.optimizers.SGD(),
              loss=tf.keras.losses.categorical_crossentropy,
              metrics=["acc"])

np.ceil(54_000/64)

844.0
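
This 844 is the steps-per-epoch count at a batch size of 64, but fit() below actually uses batch_size=32. With validation_split=0.1, 54,000 of the 60,000 training images are used for fitting, which gives the 1688 steps seen in the training log:

np.ceil(54_000/32)  # 1688.0, matching the "1688/1688" steps in the output below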
model_history = model.fit(x_train, y_train, epochs=10, batch_size=32, validation_split=0.1)

Epoch 1/10
1688/1688 [==============================] - 6s 3ms/step - loss: 0.8021 - acc: 0.7740 - val_loss: 0.3268 - val_acc: 0.9128
Epoch 2/10
1688/1688 [==============================] - 4s 2ms/step - loss: 0.3245 - acc: 0.9075 - val_loss: 0.2380 - val_acc: 0.9320
Epoch 3/10
1688/1688 [==============================] - 5s 3ms/step - loss: 0.2639 - acc: 0.9255 - val_loss: 0.2001 - val_acc: 0.9438
Epoch 4/10
1688/1688 [==============================] - 5s 3ms/step - loss: 0.2288 - acc: 0.9351 - val_loss: 0.1766 - val_acc: 0.9518
Epoch 5/10
1688/1688 [==============================] - 4s 2ms/step - loss: 0.2018 - acc: 0.9425 - val_loss: 0.1661 - val_acc: 0.9555
Epoch 6/10
1688/1688 [==============================] - 5s 3ms/step - loss: 0.1808 - acc: 0.9479 - val_loss: 0.1460 - val_acc: 0.9627
Epoch 7/10
1688/1688 [==============================] - 4s 3ms/step - loss: 0.1636 - acc: 0.9538 - val_loss: 0.1375 - val_acc: 0.9622
Epoch 8/10
1688/1688 [==============================] - 4s 2ms/step - loss: 0.1485 - acc: 0.9572 - val_loss: 0.1244 - val_acc: 0.9648
Epoch 9/10
1688/1688 [==============================] - 5s 3ms/step - loss: 0.1359 - acc: 0.9609 - val_loss: 0.1206 - val_acc: 0.9660
Epoch 10/10
1688/1688 [==============================] - 4s 2ms/step - loss: 0.1246 - acc: 0.9644 - val_loss: 0.1140 - val_acc: 0.9693

model_history.history

{'loss': [0.8020551204681396,
0.3244524300098419,
0.263948529958725,
0.22877676784992218,
0.2018246054649353,
0.180805966258049,
0.16355068981647491,
0.1484735757112503,
0.13590681552886963,
0.12456703186035156],
'acc': [0.7740370631217957,
0.9074629545211792,
0.9254814982414246,
0.9351111054420471,
0.9424814581871033,
0.9479073882102966,
0.9537592530250549,
0.9572036862373352,
0.9608888626098633,
0.9643518328666687],
'val_loss': [0.32676392793655396,
0.23804479837417603,
0.20014946162700653,
0.17659641802310944,
0.1660967767238617,
0.14596469700336456,
0.13749150931835175,
0.12439301609992981,
0.120612233877182,
0.11395595967769623],
'val_acc': [0.9128333330154419,
0.9319999814033508,
0.9438333511352539,
0.9518333077430725,
0.9555000066757202,
0.9626666903495789,
0.9621666669845581,
0.9648333191871643,
0.9660000205039978,
0.9693333506584167]}

plt.plot(model_history.history.get("acc"),label="Training")
plt.plot(model_history.history.get("val_acc"),label="Validation")
plt.legend()
plt.show()

plt.title("MNIST_Loss_Epoch10")
plt.plot(model_history.history.get("loss"),label="loss")
plt.plot(model_history.history.get("val_loss"),label="Val Loss")
plt.legend()
plt.show()

len(model.get_weights())

6

for i in model.get_weights():
    print(f"{i.shape}")

(784, 128)
(128,)
(128, 16)
(16,)
(16, 10)
(10,)
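
The six arrays come from the three Dense layers, each contributing a kernel and a bias. An optional variation (not in the original notebook) that keeps the layer association explicit:

# Each Dense layer holds a kernel and a bias, hence 3 layers * 2 = 6 arrays;
# the Flatten layer has no weights and contributes nothing
for layer in model.layers:
    for w in layer.weights:
        print(layer.name, w.shape)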

test_image = x_test[0]
plt.imshow(test_image)  # display the first test digit

<matplotlib.image.AxesImage at 0x792948ee2c20>

test_image = test_image/255.0  # scale to [0, 1], matching the training preprocessing
model.input_shape

(None, 28, 28)
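
The notebook stops after checking the input shape. As a minimal sketch of the missing final step (assuming the scaled test_image from above), the (None, 28, 28) input shape means a batch axis must be added before calling predict:

# Add a batch axis so the single image matches the (None, 28, 28) input shape
probs = model.predict(test_image[np.newaxis, ...])  # probs has shape (1, 10)
print(np.argmax(probs, axis=1)[0])                  # index of the most likely digit

Evaluating on the full test set would likewise require scaling x_test by 255 and one-hot encoding y_test first, since only the training data was preprocessed above.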
