# Experiment 8
# Partition the dataset, then standardize the features.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=42
)

# Fit the scaler on the training split only, then apply the same
# transform to the test split (avoids leaking test statistics).
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Recombine the scaled splits for decision-boundary visualization.
X_all = np.concatenate((X_train, X_test), axis=0)
y_all = np.hstack((y_train, y_test))
# Train one KNN classifier per candidate K and record its test accuracy.
for k in k_values:
    model = KNeighborsClassifier(n_neighbors=k)
    model.fit(X_train, y_train)
    knn_accuracies.append(model.score(X_test, y_test))
    # Plot the decision boundary for just a few representative K values.
    if k in (1, 3, 7):
        plot_decision_boundary(model, X_all, y_all, f"KNN (k={k})")
# Plot KNN accuracy as a function of K, with the decision-tree accuracy
# drawn as a horizontal reference line for comparison.
plt.figure(figsize=(6, 4))
plt.plot(k_values, knn_accuracies, marker='o', label="KNN Accuracy")
# FIX: the label f-string was split across two physical source lines
# (a SyntaxError); it is rejoined into a single literal here.
plt.axhline(y=tree_acc, color='r', linestyle='--',
            label=f"Decision Tree Accuracy = {tree_acc:.2f}")
plt.title("KNN Accuracy vs. K")
plt.xlabel("K (Number of Neighbors)")
plt.ylabel("Accuracy")
plt.legend()
plt.tight_layout()
plt.show()