A3 - Jupyter Notebook

Name: Jay Uddhav Shinde
Roll No: 70
BE IT

In [ ]: # Build the image classification model by dividing the work into the following 4 stages:
        # 1. Loading and preprocessing the image data
        # 2. Defining the model's architecture
        # 3. Training the model
        # 4. Estimating the model's performance

In [22]: import numpy as np
         import pandas as pd
         import random
         import tensorflow as tf
         import matplotlib.pyplot as plt
         from sklearn.metrics import accuracy_score
         from tensorflow.keras.models import Sequential
         from tensorflow.keras.layers import Flatten, Conv2D, Dense, MaxPooling2D
         from tensorflow.keras.optimizers import SGD
         from tensorflow.keras.utils import to_categorical
         from tensorflow.keras.datasets import mnist

In [23]: # Loading and preprocessing the image data


(X_train, y_train),(X_test, y_test) = mnist.load_data()

In [24]: print(X_train.shape)

(60000, 28, 28)

In [25]: X_train[0].min(),X_train[0].max()

Out[25]: (0, 255)

In [26]: X_train = (X_train - 0.0) / (255.0 - 0.0)
         X_test = (X_test - 0.0) / (255.0 - 0.0)
         X_train[0].min(), X_train[0].max()

Out[26]: (0.0, 1.0)
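
A quick sanity check, not part of the original run, sketched here to confirm the scaling holds for the full datasets and not just the first image; it only assumes the X_train/X_test arrays defined above.

In [ ]: # Sketch (not in the original notebook): verify the whole dataset is scaled to [0, 1]
        print(X_train.min(), X_train.max())   # expected: 0.0 1.0
        print(X_test.min(), X_test.max())     # expected: 0.0 1.0
        print(X_train.dtype)                  # float64 after the division above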


In [27]: def plot_digit(image, digit, plt, i):
             plt.subplot(4, 5, i + 1)
             plt.imshow(image, cmap=plt.get_cmap('gray'))
             plt.title(f"Digit: {digit}")
             plt.xticks([])
             plt.yticks([])

         plt.figure(figsize=(16, 10))
         for i in range(20):
             plot_digit(X_train[i], y_train[i], plt, i)
         plt.show()

In [28]: X_train = X_train.reshape(X_train.shape + (1,))
         X_test = X_test.reshape(X_test.shape + (1,))
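
A small shape check, added as a sketch, confirming that the reshape adds the single channel axis that the Conv2D input_shape=(28, 28, 1) below expects.

In [ ]: # Sketch: confirm the channel axis was added
        print(X_train.shape)   # expected: (60000, 28, 28, 1)
        print(X_test.shape)    # expected: (10000, 28, 28, 1)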

In [29]: y_train[0:20]

Out[29]: array([5, 0, 4, 1, 9, 2, 1, 3, 1, 4, 3, 5, 3, 6, 1, 7, 2, 8, 6, 9],
               dtype=uint8)

In [30]: # Defining the model's architecture

model = Sequential([
Conv2D(32,(3,3), activation="relu",input_shape=(28,28,1)),
MaxPooling2D((2,2)),
Flatten(),
Dense(100,activation="relu"),
Dense(10,activation="softmax")
])

In [31]: optimizer = SGD(learning_rate=0.01, momentum=0.9)


model.compile(
optimizer=optimizer,
loss="sparse_categorical_crossentropy",
metrics=["accuracy"]
)
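
Note that to_categorical is imported above but never used, because sparse_categorical_crossentropy accepts the integer labels directly. For reference, a sketch of the one-hot alternative (not what the notebook actually ran):

In [ ]: # Sketch only: the one-hot / categorical_crossentropy alternative.
        # The notebook itself keeps integer labels and uses the sparse loss above.
        y_train_oh = to_categorical(y_train, num_classes=10)
        y_test_oh = to_categorical(y_test, num_classes=10)
        # model.compile(optimizer=optimizer,
        #               loss="categorical_crossentropy",
        #               metrics=["accuracy"])
        # model.fit(X_train, y_train_oh, epochs=10, batch_size=15)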


In [32]: model.summary()

Model: "sequential_2"
_________________________________________________________________
Layer (type)                    Output Shape              Param #
=================================================================
conv2d_3 (Conv2D)               (None, 26, 26, 32)        320

max_pooling2d_3 (MaxPooling2D)  (None, 13, 13, 32)        0

flatten_2 (Flatten)             (None, 5408)              0

dense_4 (Dense)                 (None, 100)               540900

dense_5 (Dense)                 (None, 10)                1010

=================================================================
Total params: 542230 (2.07 MB)
Trainable params: 542230 (2.07 MB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
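
The parameter counts in the summary can be checked by hand; the sketch below redoes the arithmetic (weights plus biases per layer) and is not part of the original run.

In [ ]: # Sketch: reproduce the parameter counts from model.summary() by hand
        conv_params   = (3 * 3 * 1) * 32 + 32    # 3x3 kernel, 1 input channel, 32 filters + biases -> 320
        flat_units    = 13 * 13 * 32             # 26x26 feature maps pooled to 13x13, flattened -> 5408
        dense1_params = flat_units * 100 + 100   # 540900
        dense2_params = 100 * 10 + 10            # 1010
        print(conv_params, flat_units, dense1_params, dense2_params)
        print(conv_params + dense1_params + dense2_params)   # 542230, matching "Total params"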

In [33]: # Training the model

         Model_log = model.fit(X_train, y_train, epochs=10, batch_size=15, verbose=1)

Epoch 1/10
4000/4000 [==============================] - 52s 13ms/step - loss: 0.1749 - accuracy: 0.9453
Epoch 2/10
4000/4000 [==============================] - 50s 12ms/step - loss: 0.0560 - accuracy: 0.9829
Epoch 3/10
4000/4000 [==============================] - 50s 13ms/step - loss: 0.0346 - accuracy: 0.9890
Epoch 4/10
4000/4000 [==============================] - 50s 12ms/step - loss: 0.0241 - accuracy: 0.9924
Epoch 5/10
4000/4000 [==============================] - 50s 12ms/step - loss: 0.0153 - accuracy: 0.9953
Epoch 6/10
4000/4000 [==============================] - 50s 13ms/step - loss: 0.0107 - accuracy: 0.9968
Epoch 7/10
4000/4000 [==============================] - 50s 13ms/step - loss: 0.0070 - accuracy: 0.9977
Epoch 8/10
4000/4000 [==============================] - 50s 12ms/step - loss: 0.0050 - accuracy: 0.9985
Epoch 9/10
4000/4000 [==============================] - 50s 13ms/step - loss: 0.0023 - accuracy: 0.9995
Epoch 10/10
4000/4000 [==============================] - 50s 12ms/step - loss: 0.0022 - accuracy: 0.9994
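
Model_log captures the per-epoch history but is not used afterwards; a sketch of how the loss and accuracy curves could be plotted from it (assuming only the Model_log object returned by fit above):

In [ ]: # Sketch: plot the training curves stored in Model_log.history
        plt.figure(figsize=(10, 4))
        plt.subplot(1, 2, 1)
        plt.plot(Model_log.history['loss'])
        plt.title('Training loss')
        plt.xlabel('Epoch')
        plt.subplot(1, 2, 2)
        plt.plot(Model_log.history['accuracy'])
        plt.title('Training accuracy')
        plt.xlabel('Epoch')
        plt.show()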


In [34]: plt.figure(figsize=(16, 10))

         for i in range(20):
             image = random.choice(X_test).squeeze()
             digit = np.argmax(model.predict(image.reshape((1, 28, 28, 1)))[0], axis=-1)
             plot_digit(image, digit, plt, i)
         plt.show()

1/1 [==============================] - 0s 249ms/step


1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 55ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 63ms/step
1/1 [==============================] - 0s 43ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 47ms/step
1/1 [==============================] - 0s 39ms/step
1/1 [==============================] - 0s 49ms/step
1/1 [==============================] - 0s 52ms/step
1/1 [==============================] - 0s 48ms/step
1/1 [==============================] - 0s 47ms/step

In [35]: predictions = np.argmax(model.predict(X_test),axis=-1)


accuracy_score(y_test,predictions)

313/313 [==============================] - 2s 7ms/step


Out[35]: 0.9879
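
Beyond overall accuracy, a per-class view shows which digits get confused with each other; a sketch using sklearn's confusion matrix (an extra import, not in the original notebook):

In [ ]: # Sketch: per-class breakdown of the test-set predictions computed above
        from sklearn.metrics import confusion_matrix, classification_report
        print(confusion_matrix(y_test, predictions))
        print(classification_report(y_test, predictions))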


In [36]: n = random.randint(0, 9999)
         plt.imshow(X_test[n].reshape(28, 28), cmap='gray')
         plt.show()

In [38]: predicted_value = model.predict(X_test)

         print("Handwritten number in the image is = %d" % np.argmax(predicted_value[n]))

313/313 [==============================] - 2s 7ms/step


Handwritten number in the image is = 5
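
The cell above runs predict over all 10,000 test images just to read off index n; a sketch of predicting only that one image (same result, much less work):

In [ ]: # Sketch: predict only the single test image at index n
        single_pred = model.predict(X_test[n:n+1], verbose=0)
        print("Handwritten number in the image is =", np.argmax(single_pred[0]))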

In [39]: # Estimating the model's performance

         score = model.evaluate(X_test, y_test, verbose=0)
         print('Test loss:', score[0])
         print('Test accuracy:', score[1])

Test loss: 0.044867195188999176
Test accuracy: 0.9879000186920166

In [ ]:

