24CS4505 ML Assignment
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import confusion_matrix, classification_report
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
data = pd.read_csv("heart.csv")
X = data.drop(columns=['target'])
y = data['target']
# Train/test split (an 80/20 split is consistent with the 52 training batches per epoch in the log below)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)
model = Sequential([
Dense(64, activation='relu', input_shape=(X_train.shape[1],)),
Dropout(0.2),
Dense(32, activation='relu'),
Dense(1, activation='sigmoid')
])
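Note: recent Keras versions warn when input_shape is passed to the first Dense layer (see the UserWarning at the start of the training log below). A sketch of an equivalent definition that avoids the warning by declaring the input with keras.Input; this is an alternative, not part of the original submission:
# Equivalent model, declaring the input via keras.Input instead of input_shape
model = Sequential([
    keras.Input(shape=(X_train.shape[1],)),
    Dense(64, activation='relu'),
    Dropout(0.2),
    Dense(32, activation='relu'),
    Dense(1, activation='sigmoid')
])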
# Compile before training; binary cross-entropy matches the sigmoid output and the accuracy metric logged below
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
epochs = 100
batch_size = 16
history = model.fit(X_train, y_train, epochs=epochs, batch_size=batch_size, validation_data=(X_test, y_test))
Epoch 1/100
/usr/local/lib/python3.11/dist-packages/keras/src/layers/core/dense.py:87: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.
super().__init__(activity_regularizer=activity_regularizer, **kwargs)
52/52 ━━━━━━━━━━━━━━━━━━━━ 2s 7ms/step - accuracy: 0.7070 - loss: 0.6009 - val_accuracy: 0.7951 - val_loss: 0.4475
Epoch 2/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.8611 - loss: 0.3705 - val_accuracy: 0.8146 - val_loss: 0.4036
Epoch 3/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.8671 - loss: 0.3343 - val_accuracy: 0.8146 - val_loss: 0.3908
Epoch 4/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 5ms/step - accuracy: 0.8954 - loss: 0.3055 - val_accuracy: 0.8195 - val_loss: 0.3802
Epoch 5/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 1s 5ms/step - accuracy: 0.8921 - loss: 0.2805 - val_accuracy: 0.8390 - val_loss: 0.3664
Epoch 6/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 1s 5ms/step - accuracy: 0.8964 - loss: 0.2835 - val_accuracy: 0.8439 - val_loss: 0.3493
Epoch 7/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9050 - loss: 0.2521 - val_accuracy: 0.8488 - val_loss: 0.3516
Epoch 8/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9046 - loss: 0.2469 - val_accuracy: 0.8488 - val_loss: 0.3343
Epoch 9/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9043 - loss: 0.2371 - val_accuracy: 0.8439 - val_loss: 0.3228
Epoch 10/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9007 - loss: 0.2647 - val_accuracy: 0.8439 - val_loss: 0.3142
Epoch 11/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9174 - loss: 0.2230 - val_accuracy: 0.8488 - val_loss: 0.3084
Epoch 12/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9198 - loss: 0.2327 - val_accuracy: 0.8537 - val_loss: 0.2939
Epoch 13/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9276 - loss: 0.2024 - val_accuracy: 0.8683 - val_loss: 0.2871
Epoch 14/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9131 - loss: 0.2281 - val_accuracy: 0.8732 - val_loss: 0.2662
Epoch 15/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9250 - loss: 0.1954 - val_accuracy: 0.8732 - val_loss: 0.2637
Epoch 16/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9238 - loss: 0.2077 - val_accuracy: 0.8683 - val_loss: 0.2516
Epoch 17/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9282 - loss: 0.1812 - val_accuracy: 0.8878 - val_loss: 0.2500
Epoch 18/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9317 - loss: 0.1811 - val_accuracy: 0.8780 - val_loss: 0.2390
Epoch 19/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9450 - loss: 0.1494 - val_accuracy: 0.8829 - val_loss: 0.2318
Epoch 20/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9348 - loss: 0.1546 - val_accuracy: 0.8732 - val_loss: 0.2441
Epoch 21/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9186 - loss: 0.1907 - val_accuracy: 0.9122 - val_loss: 0.2064
Epoch 22/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9520 - loss: 0.1554 - val_accuracy: 0.9024 - val_loss: 0.2024
Epoch 23/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9400 - loss: 0.1469 - val_accuracy: 0.9073 - val_loss: 0.1989
Epoch 24/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9486 - loss: 0.1502 - val_accuracy: 0.9220 - val_loss: 0.1891
Epoch 25/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9488 - loss: 0.1340 - val_accuracy: 0.9122 - val_loss: 0.1790
Epoch 26/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.9403 - loss: 0.1517 - val_accuracy: 0.9317 - val_loss: 0.1737
Epoch 27/100
52/52 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.9589 - loss: 0.1421 - val_accuracy: 0.9317 - val_loss: 0.1703
Epoch 28/100
# Accuracy plot
plt.plot(history.history['accuracy'], label='Train Accuracy')
plt.plot(history.history['val_accuracy'], label='Validation Accuracy')
plt.title('Model Accuracy Over Epochs')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
# Loss plot
plt.plot(history.history['loss'], label='Train Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.title('Model Loss Over Epochs')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
# Evaluate on test data
loss, accuracy = model.evaluate(X_test, y_test)
print(f"Test Accuracy: {accuracy:.2f}")
# Confusion matrix
cm = confusion_matrix(y_test, y_pred_class)
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues')
plt.title('Confusion Matrix')
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()
# Classification report
print("Classification Report:")
print(classification_report(y_test, y_pred_class))
Classification Report:
precision recall f1-score support