Sign Language Project - Jupyter Notebook
import mnist
import pandas as pd
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense,Flatten,Conv2D,MaxPool2D,Dropout
import matplotlib.pyplot as plt
import seaborn as sns
from tensorflow.keras.preprocessing.image import ImageDataGenerator
In [4]:
# Load the Sign Language MNIST training set: one row per image, with a
# 'label' column plus 784 pixel-intensity columns (28x28 grayscale).
train_df = pd.read_csv("sign_mnist_train.csv")
train_df.head()
Out[4]:
label pixel1 pixel2 pixel3 pixel4 pixel5 pixel6 pixel7 pixel8 pixel9 ... pixel775 pixel
0 3 107 118 127 134 139 143 146 150 153 ... 207
1 6 155 157 156 156 156 157 156 158 158 ... 69
2 2 187 188 188 187 187 186 187 188 187 ... 202
3 2 211 211 212 212 211 210 211 210 210 ... 235
4 13 164 167 170 172 176 179 180 184 185 ... 92
In [5]:
# Load the held-out test set, same layout as the training CSV
# ('label' + pixel1..pixel784).
test_df = pd.read_csv("sign_mnist_test.csv")
test_df.head()
Out[5]:
label pixel1 pixel2 pixel3 pixel4 pixel5 pixel6 pixel7 pixel8 pixel9 ... pixel775 pixel
0 6 149 149 150 150 150 151 151 150 151 ... 138
1 5 126 128 131 132 133 134 135 135 136 ... 47
3 0 203 205 207 206 207 209 210 209 210 ... 154
4 3 188 191 193 195 199 201 202 203 203 ... 26
# Inspect dtypes and memory footprint of the training frame
# (785 int64 columns: 'label' plus pixel1..pixel784).
train_df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 27455 entries, 0 to 27454
Columns: 785 entries, label to pixel784
dtypes: int64(785)
memory usage: 164.4 MB
In [7]:
# Same structural check for the test frame (7172 rows, 785 int64 columns).
test_df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 7172 entries, 0 to 7171
Columns: 785 entries, label to pixel784
dtypes: int64(785)
memory usage: 43.0 MB
In [8]:
# Per-column summary statistics (count/mean/std/min/quartiles/max).
train_df.describe()
Out[8]:
# Peek at the first six training rows.
train_df.head(6)
Out[9]:
label pixel1 pixel2 pixel3 pixel4 pixel5 pixel6 pixel7 pixel8 pixel9 ... pixel775 pixel
0 3 107 118 127 134 139 143 146 150 153 ... 207
1 6 155 157 156 156 156 157 156 158 158 ... 69
2 2 187 188 188 187 187 186 187 188 187 ... 202
3 2 211 211 212 212 211 210 211 210 210 ... 235
4 13 164 167 170 172 176 179 180 184 185 ... 92
5 16 161 168 172 173 178 184 189 193 196 ... 76
In [10]:
# Separate targets from features: 'label' holds the class id,
# the remaining 784 columns are pixel intensities.
train_label = train_df["label"]
train_label.head()
trainset = train_df.drop(columns=["label"])
trainset.head()
Out[10]:
pixel1 pixel2 pixel3 pixel4 pixel5 pixel6 pixel7 pixel8 pixel9 pixel10 ... pixel775 pix
0 107 118 127 134 139 143 146 150 153 156 ... 207
1 155 157 156 156 156 157 156 158 158 157 ... 69
2 187 188 188 187 187 186 187 188 187 186 ... 202
3 211 211 212 212 211 210 211 210 210 211 ... 235
4 164 167 170 172 176 179 180 184 185 186 ... 92
In [11]:
# Convert training features into the 4-D tensor the CNN expects:
# (samples, height, width, channels) = (27455, 28, 28, 1).
# Fix: the original assigned `X_train = trainset.values` and then
# immediately overwrote it with the reshaped array -- the first, flat
# assignment was dead code and has been removed.
X_train = trainset.values.reshape(-1, 28, 28, 1)
print(X_train.shape)

# Same split for the test frame: labels out, pixel features kept as a
# DataFrame here (reshaped to 4-D in a later cell), so this shape
# prints as (7172, 784).
test_label = test_df['label']
X_test = test_df.drop(['label'], axis=1)
print(X_test.shape)
X_test.head()
(7172, 784)
Out[13]:
pixel1 pixel2 pixel3 pixel4 pixel5 pixel6 pixel7 pixel8 pixel9 pixel10 ... pixel775 pix
0 149 149 150 150 150 151 151 150 151 152 ... 138
1 126 128 131 132 133 134 135 135 136 138 ... 47
3 203 205 207 206 207 209 210 209 210 209 ... 154
4 188 191 193 195 199 201 202 203 203 203 ... 26
In [14]:
In [15]:
# NOTE(review): y_train is displayed here but never defined in the
# visible cells -- presumably a hidden/deleted cell (In [14]?) one-hot
# encoded train_label (the later printed shape (27455, 24) implies 24
# classes). Restore that cell so Restart & Run All reproduces this.
y_train
Out[15]:
In [16]:
# Reshape test pixels into (samples, 28, 28, 1) image tensors for the CNN.
X_test=X_test.values.reshape(-1,28,28,1)
In [17]:
# Sanity-check all four array shapes; the printed output shows the
# labels are 24-column (one-hot) arrays.
# NOTE(review): y_test is never defined in the visible cells -- likely
# produced alongside y_train in a hidden cell. Confirm.
print(X_train.shape,y_train.shape,X_test.shape,y_test.shape)
(27455, 28, 28, 1) (27455, 24) (7172, 28, 28, 1) (7172, 24)
In [18]:
# Scale test pixel values from [0, 255] down to [0, 1].
# NOTE(review): no matching X_train/255 appears in the visible cells;
# training on unscaled images while validating on scaled ones would be
# inconsistent -- confirm the training data is normalized elsewhere
# (e.g. via a rescale argument on the ImageDataGenerator).
X_test=X_test/255
In [19]:
# Preview four training images in a 2x2 grid with their class labels.
preview_specs = [
    (0, 0, 0, 'label: 3 letter: C'),
    (0, 1, 1, 'label: 6 letter: F'),
    (1, 0, 2, 'label: 2 letter: B'),
    (1, 1, 4, 'label: 13 letter: M'),
]
fig, axe = plt.subplots(2, 2)
fig.suptitle('Preview of dataset')
for row, col, sample_idx, title in preview_specs:
    axe[row, col].imshow(X_train[sample_idx].reshape(28, 28), cmap='gray')
    axe[row, col].set_title(title)
Out[19]:
# Class-frequency bar chart of the training labels.
# Fix: pass the series as the keyword argument `x=` -- the positional
# call triggers the seaborn FutureWarning captured in the original
# output ("Pass the following variable as a keyword arg: x") and will
# be an error in seaborn >= 0.12.
sns.countplot(x=train_label)
plt.title("Frequency of each label")
C:\Users\Lenovo\anaconda3\lib\site-packages\seaborn\_decorators.py:36: Futur
eWarning: Pass the following variable as a keyword arg: x. From version 0.1
2, the only valid positional argument will be `data`, and passing other argu
ments without an explicit keyword will result in an error or misinterpretati
on.
warnings.warn(
Out[20]:
In [21]:
# Convolutional feature extractor: three Conv2D + MaxPool2D stages that
# deepen the representation while shrinking the 28x28x1 input, ending
# with a Flatten so the dense classifier head (next cell) can attach.
model = Sequential()

model.add(Conv2D(128, kernel_size=(5, 5), strides=1, padding='same',
                 activation='relu', input_shape=(28, 28, 1)))
model.add(MaxPool2D(pool_size=(3, 3), strides=2, padding='same'))

model.add(Conv2D(64, kernel_size=(2, 2), strides=1, padding='same',
                 activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2), strides=2, padding='same'))

model.add(Conv2D(32, kernel_size=(2, 2), strides=1, padding='same',
                 activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2), strides=2, padding='same'))

model.add(Flatten())
In [22]:
# Classifier head: a 512-unit ReLU hidden layer, 25% dropout for
# regularization, and a 24-way softmax output (one unit per class,
# matching the 24-column one-hot labels printed earlier).
model.add(Dense(units=512,activation='relu'))
model.add(Dropout(rate=0.25))
model.add(Dense(units=24,activation='softmax'))
# Print the layer-by-layer architecture and parameter counts.
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 28, 28, 128) 3328
=================================================================
Total params: 319,352
Trainable params: 319,352
Non-trainable params: 0
_________________________________________________________________
In [23]:
# Adam optimizer with categorical cross-entropy (labels are one-hot),
# tracking accuracy during training and validation.
model.compile(optimizer='adam',loss='categorical_crossentropy',metrics=['accuracy'])
In [25]:
# Train for 10 epochs on augmented batches from train_datagen,
# validating each epoch on the (unaugmented) test set.
# Fix: shuffle=True instead of shuffle=1 -- the parameter is a bool.
# Note that Keras ignores `shuffle` entirely when x is a generator, so
# shuffling should really be configured on train_datagen.flow(...).
# NOTE(review): train_datagen is never defined in the visible cells --
# presumably an ImageDataGenerator built in a hidden cell. Confirm and
# restore it so Restart & Run All works.
model.fit(
    train_datagen.flow(X_train, y_train, batch_size=200),
    epochs=10,
    validation_data=(X_test, y_test),
    shuffle=True,
)
Epoch 1/10
138/138 [==============================] - 63s 455ms/step - loss: 2.8499 - a
ccuracy: 0.1407 - val_loss: 2.1006 - val_accuracy: 0.3420
Epoch 2/10
138/138 [==============================] - 60s 438ms/step - loss: 2.1736 - a
ccuracy: 0.3164 - val_loss: 1.4515 - val_accuracy: 0.4858
Epoch 3/10
138/138 [==============================] - 63s 457ms/step - loss: 1.6268 - a
ccuracy: 0.4687 - val_loss: 1.0478 - val_accuracy: 0.6471
Epoch 4/10
138/138 [==============================] - 62s 452ms/step - loss: 1.2565 - a
ccuracy: 0.5830 - val_loss: 0.7785 - val_accuracy: 0.7462
Epoch 5/10
138/138 [==============================] - 60s 436ms/step - loss: 1.0422 - a
ccuracy: 0.6530 - val_loss: 0.6935 - val_accuracy: 0.7510
Epoch 6/10
138/138 [==============================] - 60s 434ms/step - loss: 0.8913 - a
ccuracy: 0.6970 - val_loss: 0.5678 - val_accuracy: 0.8113
Epoch 7/10
138/138 [==============================] - 60s 435ms/step - loss: 0.7566 - a
ccuracy: 0.7468 - val_loss: 0.4137 - val_accuracy: 0.8783
Epoch 8/10
138/138 [==============================] - 60s 438ms/step - loss: 0.6505 - a
ccuracy: 0.7800 - val_loss: 0.3338 - val_accuracy: 0.8813
Epoch 9/10
138/138 [==============================] - 60s 438ms/step - loss: 0.5672 - a
ccuracy: 0.8090 - val_loss: 0.2608 - val_accuracy: 0.9265
Epoch 10/10
138/138 [==============================] - 60s 435ms/step - loss: 0.5077 - a
ccuracy: 0.8273 - val_loss: 0.2717 - val_accuracy: 0.9113
Out[25]:
<keras.callbacks.History at 0x1ba7f17a6d0>
In [26]:
# Final hold-out evaluation; model.evaluate returns (loss, accuracy)
# in the order the metrics were compiled.
ls, acc = model.evaluate(X_test, y_test)
In [27]:
In [ ]: