Binary Classification.ipynb - Colab

from keras.datasets import imdb

# Load the IMDB reviews dataset, keeping only the 10,000 most frequent words
(train_data, train_labels), (test_data, test_labels) = imdb.load_data(
    num_words=10000)

Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-dataset
17464789/17464789 [==============================] - 0s 0us/step

# Build a reverse index so integer sequences can be decoded back to words.
# Indices are offset by 3 because 0, 1 and 2 are reserved for
# "padding", "start of sequence" and "unknown".
word_index = imdb.get_word_index()
reverse_word_index = dict(
    [(value, key) for (key, value) in word_index.items()])
decoded_review = ' '.join(
    [reverse_word_index.get(i - 3, '?') for i in train_data[0]])
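A quick, optional check of the decoding; the exact wording of the first review depends on the dataset version, so no output is shown here:

print(decoded_review[:200])  # prints the first training review as plain text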

Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-dataset
1641221/1641221 [==============================] - 0s 0us/step

import numpy as np

def vectorize_sequences(sequences, dimension=10000):
    # Multi-hot encode each integer sequence as a binary vector of length `dimension`
    results = np.zeros((len(sequences), dimension))
    for i, sequence in enumerate(sequences):
        results[i, sequence] = 1.
    return results

x_train = vectorize_sequences(train_data)
x_test = vectorize_sequences(test_data)
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')
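A small sanity check on the encoded arrays; the shapes below assume the standard 25,000-review train and test splits shipped with the Keras IMDB loader:

print(x_train.shape)    # expected: (25000, 10000) multi-hot review vectors
print(x_train[0][:10])  # rows contain only 0.0 and 1.0 entries
print(y_train[:5])      # float32 labels: 0.0 = negative, 1.0 = positive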

from keras import models
from keras import layers

model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
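To confirm the layer stack before compiling, model.summary() can be called; the parameter counts below are derived from the layer sizes rather than copied from the notebook:

model.summary()
# Dense(16): 10000 * 16 + 16 = 160,016 parameters
# Dense(16): 16 * 16 + 16    = 272 parameters
# Dense(1):  16 * 1 + 1      = 17 parameters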

model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])

from keras import optimizers

model.compile(optimizer=optimizers.RMSprop(learning_rate=0.001),
              loss='binary_crossentropy',
              metrics=['accuracy'])

from keras import losses
from keras import metrics

model.compile(optimizer=optimizers.RMSprop(learning_rate=0.001),
              loss=losses.binary_crossentropy,
              metrics=[metrics.binary_accuracy])

# Set apart the first 10,000 training samples as a validation set
x_val = x_train[:10000]
partial_x_train = x_train[10000:]
y_val = y_train[:10000]
partial_y_train = y_train[10000:]

model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['acc'])
history = model.fit(partial_x_train,
                    partial_y_train,
                    epochs=20,
                    batch_size=512,
                    validation_data=(x_val, y_val))

Epoch 1/20
30/30 [==============================] - 3s 74ms/step - loss: 0.5002 - acc: 0.79
Epoch 2/20
30/30 [==============================] - 1s 32ms/step - loss: 0.3097 - acc: 0.89
Epoch 3/20
30/30 [==============================] - 1s 41ms/step - loss: 0.2340 - acc: 0.92
Epoch 4/20
30/30 [==============================] - 1s 35ms/step - loss: 0.1892 - acc: 0.93
Epoch 5/20
30/30 [==============================] - 1s 46ms/step - loss: 0.1639 - acc: 0.94
Epoch 6/20
30/30 [==============================] - 2s 57ms/step - loss: 0.1353 - acc: 0.95
Epoch 7/20
30/30 [==============================] - 1s 40ms/step - loss: 0.1158 - acc: 0.96
Epoch 8/20
30/30 [==============================] - 1s 35ms/step - loss: 0.1012 - acc: 0.96
Epoch 9/20
30/30 [==============================] - 1s 46ms/step - loss: 0.0852 - acc: 0.97
Epoch 10/20
30/30 [==============================] - 1s 40ms/step - loss: 0.0750 - acc: 0.97
Epoch 11/20
30/30 [==============================] - 1s 40ms/step - loss: 0.0627 - acc: 0.98
Epoch 12/20
30/30 [==============================] - 1s 35ms/step - loss: 0.0511 - acc: 0.98
Epoch 13/20
30/30 [==============================] - 1s 39ms/step - loss: 0.0458 - acc: 0.98
Epoch 14/20
30/30 [==============================] - 1s 39ms/step - loss: 0.0401 - acc: 0.98
Epoch 15/20
30/30 [==============================] - 1s 44ms/step - loss: 0.0339 - acc: 0.99
Epoch 16/20
30/30 [==============================] - 2s 61ms/step - loss: 0.0264 - acc: 0.99
Epoch 17/20
30/30 [==============================] - 1s 42ms/step - loss: 0.0249 - acc: 0.99
Epoch 18/20
30/30 [==============================] - 1s 39ms/step - loss: 0.0214 - acc: 0.99
Epoch 19/20
30/30 [==============================] - 1s 37ms/step - loss: 0.0180 - acc: 0.99
Epoch 20/20
30/30 [==============================] - 1s 33ms/step - loss: 0.0150 - acc: 0.99
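The fit call returns a History object whose history attribute is a per-epoch dict of the tracked quantities; a quick look at its keys (the names follow from metrics=['acc'] above) motivates the plotting code that comes next:

print(history.history.keys())
# expected: dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])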

import matplotlib.pyplot as plt

history_dict = history.history
loss_values = history_dict['loss']
val_loss_values = history_dict['val_loss']
epochs = range(1, len(loss_values) + 1)
plt.plot(epochs, loss_values, 'bo', label='Training loss')
plt.plot(epochs, val_loss_values, 'b', label='Validation loss')
plt.title('Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()

plt.clf()
acc = history_dict['acc']
val_acc = history_dict['val_acc']
plt.plot(epochs, acc, 'go', label='Training acc')
plt.plot(epochs, val_acc, 'g', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()

# Retrain a fresh model from scratch for 4 epochs, then evaluate it on the held-out test set
model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])
model.fit(x_train, y_train, epochs=4, batch_size=512)
results = model.evaluate(x_test, y_test)

Epoch 1/4
49/49 [==============================] - 4s 57ms/step - loss: 0.5056 - accuracy
Epoch 2/4
49/49 [==============================] - 3s 56ms/step - loss: 0.3119 - accuracy
Epoch 3/4
49/49 [==============================] - 2s 43ms/step - loss: 0.2395 - accuracy
Epoch 4/4
49/49 [==============================] - 2s 39ms/step - loss: 0.2027 - accuracy
782/782 [==============================] - 2s 2ms/step - loss: 0.2801 - accuracy
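results holds the test loss and test accuracy returned by evaluate, and predictions for reviews come from model.predict. A minimal follow-up sketch; the printed values are illustrative, not notebook output:

print(results)           # [test_loss, test_accuracy]
predictions = model.predict(x_test)
print(predictions[:5])   # per-review probabilities that the review is positive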
