PCA Codebase
Harsh Kumar
Roll No. 12112011
Batch CS-A-01
import math
from collections import Counter
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Load the dataset; drop the non-feature id column and encode the
# diagnosis label (M = 1, B = 0)
df = pd.read_csv('data.csv')
X = df.drop(columns=['id', 'diagnosis'])
y = df['diagnosis'].map({'M': 1, 'B': 0})
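# PCA sketch (assumed addition): the section is titled "PCA Codebase" but no
# PCA step appears in the excerpt, so this is a minimal NumPy sketch, not the
# original author's code. It standardizes the features and projects onto the
# top principal components via the eigendecomposition of the covariance
# matrix. The two-component choice and the name X_pca are assumptions; the
# pipeline below keeps using the raw X unless X_pca is swapped in.
X_std = (X - X.mean()) / X.std()              # column-wise standardization
cov = np.cov(X_std.values, rowvar=False)      # feature covariance matrix
eigvals, eigvecs = np.linalg.eigh(cov)        # eigh suits symmetric matrices
order = np.argsort(eigvals)[::-1]             # rank components by variance
components = eigvecs[:, order[:2]]            # top-2 principal directions
X_pca = X_std.values @ components             # projected data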
class SVM:
    def __init__(self, learning_rate=0.001, lambda_param=0.01, n_iters=1000):
        self.lr = learning_rate
        self.lambda_param = lambda_param
        self.n_iters = n_iters
        self.w = None
        self.b = None

    def fit(self, X, y):
        n_samples, n_features = X.shape
        # Hinge loss needs labels in {-1, +1}, so remap the 0/1 encoding
        y_ = np.where(y <= 0, -1, 1)
        self.w = np.zeros(n_features)
        self.b = 0

        # Stochastic (sub)gradient descent on the regularized hinge loss
        for _ in range(self.n_iters):
            for idx, x_i in enumerate(X):
                condition = y_[idx] * (np.dot(x_i, self.w) - self.b) >= 1
                if condition:
                    # Margin satisfied: only the L2 regularizer contributes
                    self.w -= self.lr * (2 * self.lambda_param * self.w)
                else:
                    # Margin violated: add the hinge-loss gradient as well
                    self.w -= self.lr * (2 * self.lambda_param * self.w
                                         - np.dot(x_i, y_[idx]))
                    self.b -= self.lr * y_[idx]

    def predict(self, X):
        # Sign of the decision function gives labels in {-1, +1}
        return np.sign(np.dot(X, self.w) - self.b)
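# Design note: the update rule is stochastic subgradient descent on the
# regularized hinge loss. A sample that satisfies the margin,
# y_i * (w . x_i - b) >= 1, only shrinks w through the L2 regularizer;
# a margin violation additionally pulls w toward y_i * x_i and shifts b.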
# Positional 80/20 train/test split (the ratio is assumed; split_index was
# undefined in the original). This presumes the rows are not ordered by class.
split_index = int(0.8 * len(X))
X_train = X[:split_index].values
y_train = y[:split_index].values
X_test = X[split_index:].values
y_test = y[split_index:].values
svm = SVM()
svm.fit(X_train, y_train)
y_pred_svm = svm.predict(X_test)
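# Evaluation sketch (assumed addition, not in the original): predict() returns
# labels in {-1, +1}, so map them back to the 0/1 encoding of y before scoring.
y_pred_01 = np.where(y_pred_svm <= 0, 0, 1)
accuracy = np.mean(y_pred_01 == y_test)
print(f"SVM test accuracy: {accuracy:.3f}")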