Logistic Exp1 (logistic-exp1.ipynb, exported 26/8/24, 1:04 PM)

In [3]: import numpy as np
        import pandas as pd
        from sklearn.model_selection import train_test_split
        from sklearn.preprocessing import StandardScaler
        from sklearn.metrics import accuracy_score

In [4]: # Load the dataset from the CSV file
        insurance_data = pd.read_csv('insurance_dataset.csv')

In [5]: # Display the dimensions of the dataset
        print("Dimensions of the insurance dataset:", insurance_data.shape)

Dimensions of the insurance dataset: (1338, 5)
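
A quick look at the raw columns and dtypes shows what the one-hot encoding in the next cell will act on; a minimal inspection sketch, assuming only the insurance_data frame loaded above:

# Inspect the raw columns, their dtypes and a few rows before encoding
print(insurance_data.dtypes)
print(insurance_data.head())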

In [6]: # Convert categorical variables to numerical using one-hot encoding
        # (note: age is treated as categorical here, so each distinct age value gets its own dummy column)
        insurance_data = pd.get_dummies(insurance_data, columns=['age', 'sex', 'smoker', 'region'])

        # Extract features and target variable
        X = insurance_data.drop(['charges'], axis=1)
        # Binary classification target: 1 if charges are above the mean, else 0
        y = (insurance_data['charges'] > np.mean(insurance_data['charges'])).astype(int)

        # Split the dataset into training and testing sets using the standard 70/30 ratio
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

In [7]: # Display the dimensions of the x_test
        print("Dimensions of the x_test:", X_test.shape)

Dimensions of the x_test: (402, 55)
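
Most of the 55 columns come from one-hot encoding age: if sex, smoker and region contribute 2, 2 and 4 dummies respectively, the remaining 47 would correspond to distinct age values. A small sketch to confirm this, assuming the encoded columns keep pandas' default feature_value naming:

# Count how many dummy columns each original categorical feature produced
for col in ['age', 'sex', 'smoker', 'region']:
    n = sum(c.startswith(col + '_') for c in X.columns)
    print(col, '->', n, 'dummy columns')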

In [8]: # Standardize features (optional but recommended for logistic regression)
        scaler = StandardScaler()
        X_train = scaler.fit_transform(X_train)
        X_test = scaler.transform(X_test)

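StandardScaler centres each feature with the training-set mean and divides by the training-set standard deviation, i.e. z = (x - mean) / std, and the same training statistics are reused on the test set to avoid leakage. A quick check of that property on the arrays produced above (a sketch; column-wise statistics only):

# After scaling, training features should be centred near 0 with roughly unit spread
print("max |train column mean|:", np.abs(X_train.mean(axis=0)).max())
print("train column std range:", X_train.std(axis=0).min(), "to", X_train.std(axis=0).max())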

In [9]: # Implement logistic regression as a single neuron
        class LogisticNeuron:
            def __init__(self, input_dim):
                # One weight per input feature, initialized to zero, plus a scalar bias
                self.weights = np.zeros(input_dim)
                self.bias = 0

            def sigmoid(self, z):
                # Logistic activation: squashes the linear output into a probability in (0, 1)
                return 1 / (1 + np.exp(-z))

            def predict(self, X):
                # Weighted sum of the inputs plus bias, passed through the sigmoid
                linear_output = np.dot(X, self.weights) + self.bias
                return self.sigmoid(linear_output)

            def train(self, X, y, learning_rate=0.1, epochs=100):
                # Full-batch updates: push the weights along X^T (y - p), the negative
                # gradient of the binary cross-entropy loss
                for _ in range(epochs):
                    predictions = self.predict(X)
                    errors = y - predictions
                    self.weights += learning_rate * np.dot(X.T, errors)
                    self.bias += learning_rate * np.sum(errors)

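The update rule in train is full-batch gradient descent on the binary cross-entropy loss: for a sigmoid unit the gradient of that loss with respect to the weights is X^T (p - y), so adding learning_rate * X^T (y - p) moves the weights in the descent direction. A small helper for monitoring the loss during training might look like this (an illustrative sketch; cross_entropy is not part of the class above):

# Hypothetical helper: average binary cross-entropy of the neuron's predictions
def cross_entropy(neuron, X, y, eps=1e-12):
    p = np.clip(neuron.predict(X), eps, 1 - eps)  # clip to avoid log(0)
    return -np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))

Evaluating it before and after neuron.train(X_train, y_train) would typically show the loss falling, although with summed (un-averaged) gradients and learning_rate=0.1 the updates can be large and the loss may oscillate.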
In [10]: # Initialize and train the logistic neuron
         neuron = LogisticNeuron(input_dim=X_train.shape[1])
         neuron.train(X_train, y_train)

         # Make predictions on the test set (probabilities rounded to 0/1)
         y_pred = np.round(neuron.predict(X_test))

         # Calculate accuracy
         accuracy = accuracy_score(y_test, y_pred)
         print("Accuracy:", accuracy)

Accuracy: 0.8432835820895522

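As a rough sanity check, scikit-learn's LogisticRegression can be fitted on the same standardized split; since it optimizes the same loss with a more robust solver, an accuracy in the same ballpark would suggest the single-neuron implementation behaves sensibly (a sketch reusing the arrays above):

from sklearn.linear_model import LogisticRegression

# Reference model trained on the same standardized features
ref_model = LogisticRegression(max_iter=1000)
ref_model.fit(X_train, y_train)
print("sklearn accuracy:", accuracy_score(y_test, ref_model.predict(X_test)))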