Logistic Regression

Name: Shruti Suman    Roll: MCA/10011/21

# Logistic Regression

import csv
import numpy as np
import matplotlib.pyplot as plt


def loadCSV(filename):
    '''function to load dataset'''
    with open(filename, "r") as csvfile:
        lines = csv.reader(csvfile)
        dataset = list(lines)
        for i in range(len(dataset)):
            dataset[i] = [float(x) for x in dataset[i]]
    return np.array(dataset)


def normalize(X):
    '''function to normalize feature matrix, X'''
    mins = np.min(X, axis=0)
    maxs = np.max(X, axis=0)
    rng = maxs - mins
    norm_X = 1 - ((maxs - X) / rng)
    return norm_X


def logistic_func(beta, X):
    '''logistic (sigmoid) function'''
    return 1.0 / (1 + np.exp(-np.dot(X, beta.T)))


def log_gradient(beta, X, y):
    '''logistic gradient function'''
    first_calc = logistic_func(beta, X) - y.reshape(X.shape[0], -1)
    final_calc = np.dot(first_calc.T, X)
    return final_calc


def cost_func(beta, X, y):
    '''cost function, J'''
    log_func_v = logistic_func(beta, X)
    y = np.squeeze(y)
    step1 = y * np.log(log_func_v)
    step2 = (1 - y) * np.log(1 - log_func_v)
    final = -step1 - step2
    return np.mean(final)


def grad_desc(X, y, beta, lr=.01, converge_change=.001):
    '''gradient descent function'''
    cost = cost_func(beta, X, y)
    change_cost = 1
    num_iter = 1
    while change_cost > converge_change:
        old_cost = cost
        beta = beta - (lr * log_gradient(beta, X, y))
        cost = cost_func(beta, X, y)
        change_cost = old_cost - cost
        num_iter += 1
    return beta, num_iter


def pred_values(beta, X):
    '''function to predict labels'''
    pred_prob = logistic_func(beta, X)
    pred_value = np.where(pred_prob >= .5, 1, 0)
    return np.squeeze(pred_value)


def plot_reg(X, y, beta):
    '''function to plot decision boundary'''
    # labelled observations
    x_0 = X[np.where(y == 0.0)]
    x_1 = X[np.where(y == 1.0)]

    # plotting points with different colours for the two labels
    plt.scatter([x_0[:, 1]], [x_0[:, 2]], c='b', label='y = 0')
    plt.scatter([x_1[:, 1]], [x_1[:, 2]], c='r', label='y = 1')

    # plotting decision boundary
    x1 = np.arange(0, 1, 0.1)
    x2 = -(beta[0, 0] + beta[0, 1] * x1) / beta[0, 2]
    plt.plot(x1, x2, c='k', label='reg line')

    plt.xlabel('x1')
    plt.ylabel('x2')
    plt.legend()
    plt.show()


# load the dataset
dataset = loadCSV('data.csv')

# normalizing feature matrix
X = normalize(dataset[:, :-1])

# stacking a column of all ones onto the feature matrix (intercept term)
X = np.hstack((np.matrix(np.ones(X.shape[0])).T, X))

# response vector
y = dataset[:, -1]

# initial beta values
beta = np.matrix(np.zeros(X.shape[1]))

# beta values after running gradient descent
beta, num_iter = grad_desc(X, y, beta)

# estimated beta values and number of iterations
print("Estimated regression coefficients:", beta)
print("No. of iterations:", num_iter)

# predicted labels
y_pred = pred_values(beta, X)

# number of correctly predicted labels
print("Correctly predicted labels:", np.sum(y == y_pred))

# plotting regression line
plot_reg(X, y, beta)

Output:
Estimated regression coefficients: [[  1.70474504  15.04062212 -20.47216021]]
No. of iterations: 2612
Correctly predicted labels: 100

[Figure: scatter plot of the two classes (x1 vs x2) with the fitted decision boundary drawn as "reg line".]
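The script reads from 'data.csv', which is not included in this printout. As a minimal sketch only, assuming the file holds two numeric feature columns followed by a 0/1 label and no header row (the format loadCSV() above appears to expect), a compatible synthetic file could be generated like this; the class centres, spread, and sample size are made-up values for testing, not the original data:

# Sketch: build a hypothetical 'data.csv' with two Gaussian blobs, one per class.
import numpy as np

rng = np.random.default_rng(0)
n = 50  # points per class (assumed; the original dataset size is not shown)

class0 = rng.normal(loc=[2.0, 6.0], scale=0.8, size=(n, 2))  # label 0 cluster
class1 = rng.normal(loc=[6.0, 2.0], scale=0.8, size=(n, 2))  # label 1 cluster

features = np.vstack((class0, class1))
labels = np.concatenate((np.zeros(n), np.ones(n)))

# all-numeric rows, label in the last column, comma separated
data = np.column_stack((features, labels))
np.savetxt('data.csv', data, delimiter=',', fmt='%.6f')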

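As a rough sanity check on the hand-rolled gradient descent, the estimated coefficients can be compared against scikit-learn's LogisticRegression fitted on the same design matrix. This is a sketch under the assumptions that scikit-learn is installed and that the X and y built above are in scope; exact agreement is not expected, since scikit-learn applies L2 regularization by default and uses a different optimizer.

# Sketch: cross-check with scikit-learn (assumes X and y from the script above).
import numpy as np
from sklearn.linear_model import LogisticRegression

X_arr = np.asarray(X)                     # np.matrix -> ndarray for sklearn
y_arr = np.squeeze(np.asarray(y))

clf = LogisticRegression(fit_intercept=False)  # X already contains a column of ones
clf.fit(X_arr, y_arr)

print("sklearn coefficients:", clf.coef_)
print("sklearn accuracy    :", clf.score(X_arr, y_arr))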