
Experiment_4

November 10, 2024

[ ]: #Akanksha Thigle
#u21ec058

[23]: ## Import libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

[24]: ## Import the dataset
data = pd.read_csv(r"C:\Users\RIG\Downloads\detaset.csv")
data

[24]: x y
0 0.00 -1.179169
1 0.01 0.515011
2 0.02 -0.047497
3 0.03 0.160731
4 0.04 0.704209
... ... ...
996 9.96 21.899947
997 9.97 21.150909
998 9.98 19.900154
999 9.99 22.883209
1000 10.00 19.949295

[1001 rows x 2 columns]

[26]: ## Visualize the dataset: plot it as a 2D scatter to check for a linear relationship
x = data["x"]
y = data["y"]

plt.scatter(x, y, s=10)
plt.show()

[27]: ## Make training and testing datasets
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.20, random_state=0)

[28]: print(x_train)
print(x_test)
print(y_train)
print(y_test)

465 4.65
399 3.99
511 5.11
818 8.18
951 9.51
...
835 8.35
192 1.92
629 6.29
559 5.59
684 6.84
Name: x, Length: 800, dtype: float64
708 7.08
533 5.33
298 2.98
356 3.56
833 8.33
...
991 9.91
215 2.15
654 6.54
150 1.50
342 3.42
Name: x, Length: 201, dtype: float64
465 11.428067
399 8.306230
511 10.100651
818 16.236373
951 18.930391
...
835 17.475608
192 2.530710
629 12.210384
559 10.978225
684 12.754473
Name: y, Length: 800, dtype: float64
708 14.329850
533 11.899805
298 5.398876
356 8.294682
833 18.148310
...
991 19.509641
215 2.819302
654 13.934812
150 4.771437
342 7.333522
Name: y, Length: 201, dtype: float64


[29]: ## Write the training function as follows:
# Initialize w and b, the learning rate, and the number of epochs (model: y = w*x + b)
# Inside the loop:
#   compute y_pred from the current w and b: y_pred = w*x + b
#   compute the MSE between y_pred and y_true
#   compute the gradients of the MSE with respect to w and b
#   update w and b
#   observe the loss for each epoch
# Print the final w and b
def train_linear_regression(x, y, learning_rate=0.01, epochs=100):
    # Step 1: Initialize parameters
    w = 0.0     # Initial weight
    b = 0.0     # Initial bias
    n = len(x)  # Number of data points

    # Training loop
    for epoch in range(epochs):
        # Step 2: Predict y using the current w and b
        y_pred = w * x + b

        # Step 3: Compute the Mean Squared Error (MSE)
        mse = np.mean((y - y_pred) ** 2)

        # Step 4: Compute the gradients of the MSE with respect to w and b
        dw = -(2 / n) * np.sum(x * (y - y_pred))
        db = -(2 / n) * np.sum(y - y_pred)

        # Step 5: Update w and b using gradient descent
        w -= learning_rate * dw
        b -= learning_rate * db

        # Observe the loss for each epoch
        print(f"Epoch {epoch+1}/{epochs}, Loss: {mse:.4f}, w: {w:.4f}, b: {b:.4f}")

    # Step 6: Print the final values of w and b
    print(f"\nFinal values: w = {w:.4f}, b = {b:.4f}")
    return w, b

def predict(x, w, b):
    return w * x + b
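For reference, the gradient expressions used in Step 4 follow from differentiating the MSE loss; the short derivation below is standard calculus, written out here for clarity (it is not part of the original notebook):

L(w, b) = \frac{1}{n} \sum_{i=1}^{n} \left( y_i - (w x_i + b) \right)^2

\frac{\partial L}{\partial w} = -\frac{2}{n} \sum_{i=1}^{n} x_i \left( y_i - \hat{y}_i \right), \qquad
\frac{\partial L}{\partial b} = -\frac{2}{n} \sum_{i=1}^{n} \left( y_i - \hat{y}_i \right)

where \hat{y}_i = w x_i + b. These are exactly the dw and db computed in the code above.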

[30]: ## Use the final w and b to predict the y values for the test set
## Plot the fitted line on the training and testing datasets
w, b = train_linear_regression(x_train, y_train, learning_rate=0.01, epochs=100)

# Predict values for the training and testing datasets
y_train_pred = predict(x_train, w, b)
y_test_pred = predict(x_test, w, b)

# Plotting the results
plt.figure(figsize=(10, 6))

# Plot training data and prediction line
plt.scatter(x_train, y_train, color='purple', s=10, label='Training Data')
plt.plot(x_train, y_train_pred, color='black', label='Training Fit Line')

# Plot test data and prediction line
plt.scatter(x_test, y_test, color='pink', s=10, label='Test Data')
plt.plot(x_test, y_test_pred, color='yellow', label='Test Fit Line')

# Plot settings
plt.title('Linear Regression on Training and Test Data')
plt.xlabel('x-axis')
plt.ylabel('y-axis')
plt.grid(True)
plt.legend()
plt.show()
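One plotting note (an observation added here, not from the original notebook): train_test_split returns x_test in shuffled order, so plt.plot(x_test, y_test_pred) draws its line segments back and forth across the axis. Because all the predictions lie on a single straight line the plot still looks correct here, but sorting by x first is the more robust idiom:

# Sort the test points by x so the fitted line is drawn left to right
order = np.argsort(x_test.values)
plt.plot(x_test.values[order], y_test_pred.values[order], color='yellow', label='Test Fit Line')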

Epoch 1/100, Loss: 139.9001, w: 1.3430, b: 0.2061
Epoch 2/100, Loss: 16.5903, w: 1.7940, b: 0.2760
Epoch 3/100, Loss: 2.6788, w: 1.9453, b: 0.3003
Epoch 4/100, Loss: 1.1093, w: 1.9961, b: 0.3092
Epoch 5/100, Loss: 0.9321, w: 2.0130, b: 0.3129
Epoch 6/100, Loss: 0.9120, w: 2.0186, b: 0.3149
Epoch 7/100, Loss: 0.9096, w: 2.0203, b: 0.3163
Epoch 8/100, Loss: 0.9092, w: 2.0208, b: 0.3175
Epoch 9/100, Loss: 0.9091, w: 2.0209, b: 0.3186
Epoch 10/100, Loss: 0.9089, w: 2.0208, b: 0.3197
Epoch 11/100, Loss: 0.9088, w: 2.0206, b: 0.3208
Epoch 12/100, Loss: 0.9087, w: 2.0205, b: 0.3219
Epoch 13/100, Loss: 0.9086, w: 2.0203, b: 0.3229
Epoch 14/100, Loss: 0.9085, w: 2.0201, b: 0.3240
Epoch 15/100, Loss: 0.9083, w: 2.0200, b: 0.3251
Epoch 16/100, Loss: 0.9082, w: 2.0198, b: 0.3261
Epoch 17/100, Loss: 0.9081, w: 2.0197, b: 0.3272
Epoch 18/100, Loss: 0.9080, w: 2.0195, b: 0.3282
Epoch 19/100, Loss: 0.9079, w: 2.0193, b: 0.3293
Epoch 20/100, Loss: 0.9078, w: 2.0192, b: 0.3303
Epoch 21/100, Loss: 0.9077, w: 2.0190, b: 0.3313
Epoch 22/100, Loss: 0.9076, w: 2.0189, b: 0.3324
Epoch 23/100, Loss: 0.9074, w: 2.0187, b: 0.3334
Epoch 24/100, Loss: 0.9073, w: 2.0186, b: 0.3344
Epoch 25/100, Loss: 0.9072, w: 2.0184, b: 0.3354
Epoch 26/100, Loss: 0.9071, w: 2.0182, b: 0.3364
Epoch 27/100, Loss: 0.9070, w: 2.0181, b: 0.3374
Epoch 28/100, Loss: 0.9069, w: 2.0179, b: 0.3384
Epoch 29/100, Loss: 0.9068, w: 2.0178, b: 0.3394
Epoch 30/100, Loss: 0.9067, w: 2.0176, b: 0.3404
Epoch 31/100, Loss: 0.9066, w: 2.0175, b: 0.3414
Epoch 32/100, Loss: 0.9065, w: 2.0173, b: 0.3423
Epoch 33/100, Loss: 0.9064, w: 2.0172, b: 0.3433
Epoch 34/100, Loss: 0.9063, w: 2.0170, b: 0.3443
Epoch 35/100, Loss: 0.9062, w: 2.0169, b: 0.3452
Epoch 36/100, Loss: 0.9061, w: 2.0168, b: 0.3462
Epoch 37/100, Loss: 0.9061, w: 2.0166, b: 0.3472
Epoch 38/100, Loss: 0.9060, w: 2.0165, b: 0.3481
Epoch 39/100, Loss: 0.9059, w: 2.0163, b: 0.3490
Epoch 40/100, Loss: 0.9058, w: 2.0162, b: 0.3500
Epoch 41/100, Loss: 0.9057, w: 2.0160, b: 0.3509
Epoch 42/100, Loss: 0.9056, w: 2.0159, b: 0.3518
Epoch 43/100, Loss: 0.9055, w: 2.0158, b: 0.3528
Epoch 44/100, Loss: 0.9054, w: 2.0156, b: 0.3537
Epoch 45/100, Loss: 0.9053, w: 2.0155, b: 0.3546
Epoch 46/100, Loss: 0.9053, w: 2.0153, b: 0.3555
Epoch 47/100, Loss: 0.9052, w: 2.0152, b: 0.3564
Epoch 48/100, Loss: 0.9051, w: 2.0151, b: 0.3573
Epoch 49/100, Loss: 0.9050, w: 2.0149, b: 0.3582
Epoch 50/100, Loss: 0.9049, w: 2.0148, b: 0.3591
Epoch 51/100, Loss: 0.9048, w: 2.0147, b: 0.3600
Epoch 52/100, Loss: 0.9048, w: 2.0145, b: 0.3609
Epoch 53/100, Loss: 0.9047, w: 2.0144, b: 0.3617
Epoch 54/100, Loss: 0.9046, w: 2.0142, b: 0.3626
Epoch 55/100, Loss: 0.9045, w: 2.0141, b: 0.3635
Epoch 56/100, Loss: 0.9044, w: 2.0140, b: 0.3644
Epoch 57/100, Loss: 0.9044, w: 2.0139, b: 0.3652
Epoch 58/100, Loss: 0.9043, w: 2.0137, b: 0.3661
Epoch 59/100, Loss: 0.9042, w: 2.0136, b: 0.3669
Epoch 60/100, Loss: 0.9041, w: 2.0135, b: 0.3678
Epoch 61/100, Loss: 0.9041, w: 2.0133, b: 0.3686
Epoch 62/100, Loss: 0.9040, w: 2.0132, b: 0.3695
Epoch 63/100, Loss: 0.9039, w: 2.0131, b: 0.3703
Epoch 64/100, Loss: 0.9039, w: 2.0130, b: 0.3711
Epoch 65/100, Loss: 0.9038, w: 2.0128, b: 0.3719
Epoch 66/100, Loss: 0.9037, w: 2.0127, b: 0.3728
Epoch 67/100, Loss: 0.9036, w: 2.0126, b: 0.3736
Epoch 68/100, Loss: 0.9036, w: 2.0125, b: 0.3744
Epoch 69/100, Loss: 0.9035, w: 2.0123, b: 0.3752
Epoch 70/100, Loss: 0.9034, w: 2.0122, b: 0.3760
Epoch 71/100, Loss: 0.9034, w: 2.0121, b: 0.3768
Epoch 72/100, Loss: 0.9033, w: 2.0120, b: 0.3776
Epoch 73/100, Loss: 0.9032, w: 2.0118, b: 0.3784
Epoch 74/100, Loss: 0.9032, w: 2.0117, b: 0.3792
Epoch 75/100, Loss: 0.9031, w: 2.0116, b: 0.3800
Epoch 76/100, Loss: 0.9031, w: 2.0115, b: 0.3808
Epoch 77/100, Loss: 0.9030, w: 2.0114, b: 0.3815
Epoch 78/100, Loss: 0.9029, w: 2.0112, b: 0.3823
Epoch 79/100, Loss: 0.9029, w: 2.0111, b: 0.3831
Epoch 80/100, Loss: 0.9028, w: 2.0110, b: 0.3839
Epoch 81/100, Loss: 0.9028, w: 2.0109, b: 0.3846
Epoch 82/100, Loss: 0.9027, w: 2.0108, b: 0.3854
Epoch 83/100, Loss: 0.9026, w: 2.0107, b: 0.3861
Epoch 84/100, Loss: 0.9026, w: 2.0105, b: 0.3869
Epoch 85/100, Loss: 0.9025, w: 2.0104, b: 0.3876
Epoch 86/100, Loss: 0.9025, w: 2.0103, b: 0.3884
Epoch 87/100, Loss: 0.9024, w: 2.0102, b: 0.3891
Epoch 88/100, Loss: 0.9023, w: 2.0101, b: 0.3899
Epoch 89/100, Loss: 0.9023, w: 2.0100, b: 0.3906
Epoch 90/100, Loss: 0.9022, w: 2.0099, b: 0.3913
Epoch 91/100, Loss: 0.9022, w: 2.0098, b: 0.3920
Epoch 92/100, Loss: 0.9021, w: 2.0096, b: 0.3928
Epoch 93/100, Loss: 0.9021, w: 2.0095, b: 0.3935
Epoch 94/100, Loss: 0.9020, w: 2.0094, b: 0.3942
Epoch 95/100, Loss: 0.9020, w: 2.0093, b: 0.3949
Epoch 96/100, Loss: 0.9019, w: 2.0092, b: 0.3956
Epoch 97/100, Loss: 0.9019, w: 2.0091, b: 0.3963
Epoch 98/100, Loss: 0.9018, w: 2.0090, b: 0.3970
Epoch 99/100, Loss: 0.9018, w: 2.0089, b: 0.3977
Epoch 100/100, Loss: 0.9017, w: 2.0088, b: 0.3984

Final values: w = 2.0088, b = 0.3984
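As an optional sanity check (not part of the original notebook; it assumes scikit-learn is installed, which the train/test split cell already uses), the gradient-descent result can be compared against scikit-learn's closed-form least-squares fit, and the held-out test MSE can be reported:

from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error

# Closed-form least-squares fit for comparison (expects a 2D feature array)
reg = LinearRegression()
reg.fit(x_train.values.reshape(-1, 1), y_train)
print(f"sklearn fit: w = {reg.coef_[0]:.4f}, b = {reg.intercept_:.4f}")

# MSE of the gradient-descent model on the test set
test_mse = mean_squared_error(y_test, predict(x_test, w, b))
print(f"Test MSE (gradient descent): {test_mse:.4f}")

If gradient descent has converged, the two fits should agree closely.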
