DSBDA 5
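This excerpt starts at the evaluation step, so accuracy, conf_matrix, classification_rep, y_test, and y_pred are referenced before they are defined. Below is a minimal sketch of the assumed setup, using scikit-learn's built-in breast-cancer dataset as a stand-in; the actual dataset, features, and model settings in the original assignment are not shown here and may differ.

import pandas as pd
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report

# Stand-in data: the original assignment's dataset is not part of this excerpt
X, y = load_breast_cancer(return_X_y=True)

# Hold out a test set for evaluation
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)

# Fit a logistic regression classifier (max_iter raised to ensure convergence)
model = LogisticRegression(max_iter=10000)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)

# Evaluation objects referenced by the print statements below
accuracy = accuracy_score(y_test, y_pred)
conf_matrix = confusion_matrix(y_test, y_pred)
classification_rep = classification_report(y_test, y_pred)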
print(f'Accuracy: {accuracy}')
print(f'Confusion Matrix:\n{conf_matrix}')
print(f'Classification Report:\n{classification_rep}')
Accuracy: 0.89
Confusion Matrix:
[[65  3]
 [ 8 24]]
Classification Report:
              precision    recall  f1-score   support

           0       0.89      0.96      0.92        68
           1       0.89      0.75      0.81        32

    accuracy                           0.89       100
   macro avg       0.89      0.85      0.87       100
weighted avg       0.89      0.89      0.89       100
# Combine the actual labels and predicted labels into a DataFrame for comparison
results_df = pd.DataFrame({'Actual': y_test, 'Predicted': y_pred})
# Print the DataFrame to see the actual and predicted labels side by side
print("\nActual vs Predicted Labels:")
print(results_df)
correctly_classified_samples = results_df[results_df['Actual'] == results_df['Predicted']].head(10)
print("\nFirst 10 Samples with Correct Classification:")
print(correctly_classified_samples)
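The complementary filter (an illustrative addition, not part of the original) surfaces the misclassified samples, which is usually the more useful view for error analysis:

# Rows where the prediction disagrees with the actual label
misclassified_samples = results_df[results_df['Actual'] != results_df['Predicted']]
print("\nMisclassified Samples:")
print(misclassified_samples)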
# Metrics calculated by hand from the confusion matrix;
# sklearn's confusion_matrix lays out binary results as [[TN, FP], [FN, TP]]
TN, FP, FN, TP = conf_matrix.ravel()
accuracy = (TP + TN) / (TP + TN + FP + FN)
error_rate = (FP + FN) / (TP + TN + FP + FN)
precision = TP / (TP + FP)
recall = TP / (TP + FN)
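As a sanity check, plugging in the counts from the confusion matrix printed above (TN = 65, FP = 3, FN = 8, TP = 24):

accuracy   = (24 + 65) / 100 = 0.89   (matches the accuracy reported earlier)
error_rate = (3 + 8) / 100   = 0.11
precision  = 24 / 27         ≈ 0.8889
recall     = 24 / 32         = 0.75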