import tensorflow as tf
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Conv1D, MaxPool1D, Flatten, Dense, Dropout, BatchNormalization
from tensorflow.keras.optimizers import Adam
In [21]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
In [22]:
from sklearn import datasets

cancerData = datasets.load_breast_cancer()
In [24]:
X = pd.DataFrame(data=cancerData.data, columns=cancerData.feature_names)
X.head()
Out[24]:
[X.head() preview of the 30 breast-cancer feature columns, from mean radius, mean texture and mean perimeter through worst symmetry and worst fractal dimension]
5 rows × 30 columns
In [25]:
y = cancerData.target
In [26]:
X.shape
Out[26]:
(569, 30)
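The export drops the cell that splits the data into training and test sets. A minimal sketch, assuming a 10% hold-out (which reproduces the 512/57 shapes reported below; the original random_state is not recoverable):

In [27]:
from sklearn.model_selection import train_test_split

# Assumed split: test_size=0.1 of 569 samples gives 57 test / 512 train rows
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=42)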
In [28]:
X_train.shape
Out[28]:
(512, 30)
In [29]:
y_test.shape
Out[29]:
(57,)
Applying StandardScaler()
In [30]:
from sklearn.preprocessing import StandardScaler

scaler = StandardScaler()
In [31]:
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)
# Reshape to (samples, steps, channels) so Conv1D can treat the 30 features as a 1D sequence
X_train = X_train.reshape(512, 30, 1)
X_test = X_test.reshape(57, 30, 1)
model = Sequential()
model.add(Conv1D(filters=16, kernel_size=2, activation='relu', input_shape=(30, 1)))
model.add(BatchNormalization())
model.add(Dropout(0.2))
model.add(Conv1D(32,2,activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(0.2))
model.add(Flatten())
model.add(Dense(32,activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(1,activation='sigmoid'))
In [34]:
model.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv1d_2 (Conv1D) (None, 29, 16) 48
_________________________________________________________________
batch_normalization_2 (Batch (None, 29, 16) 64
_________________________________________________________________
dropout_3 (Dropout) (None, 29, 16) 0
_________________________________________________________________
conv1d_3 (Conv1D) (None, 28, 32) 1056
_________________________________________________________________
batch_normalization_3 (Batch (None, 28, 32) 128
_________________________________________________________________
dropout_4 (Dropout) (None, 28, 32) 0
_________________________________________________________________
flatten_1 (Flatten) (None, 896) 0
_________________________________________________________________
dense_2 (Dense) (None, 32) 28704
_________________________________________________________________
dropout_5 (Dropout) (None, 32) 0
_________________________________________________________________
dense_3 (Dense) (None, 1) 33
=================================================================
Total params: 30,033
Trainable params: 29,937
Non-trainable params: 96
_________________________________________________________________
In [35]:
model.compile(optimizer=Adam(learning_rate=0.0001), loss='binary_crossentropy', metrics=['accuracy'])
In [36]:
history = model.fit(X_train, y_train, epochs=35, verbose=1, validation_data=(X_test, y_test))
def plotLearningCurve(history, epochs):
    epochRange = range(1, epochs + 1)
    plt.plot(epochRange, history.history['accuracy'])
    plt.plot(epochRange, history.history['val_accuracy'])
    plt.title('Model Accuracy')
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.legend(['Train', 'Validation'], loc='upper left')
    plt.show()
    plt.plot(epochRange, history.history['loss'])
    plt.plot(epochRange, history.history['val_loss'])
    plt.title('Model Loss')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend(['Train', 'Validation'], loc='upper left')
    plt.show()
In [38]:
plotLearningCurve(history,35)
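The notebook ends with the learning curves. As a quick follow-up that is not in the source, the trained model could also be scored directly on the held-out split; this sketch assumes the model, X_test and y_test defined above:

# Sketch: report loss and accuracy on the held-out test set
loss, acc = model.evaluate(X_test, y_test, verbose=0)
print(f'Test loss: {loss:.4f}, test accuracy: {acc:.4f}')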