Experiment 4
[ ]: # Akanksha Thigle
# u21ec058
[24]: x y
0 0.00 -1.179169
1 0.01 0.515011
2 0.02 -0.047497
3 0.03 0.160731
4 0.04 0.704209
... ... ...
996 9.96 21.899947
997 9.97 21.150909
998 9.98 19.900154
999 9.99 22.883209
1000 10.00 19.949295
plt.scatter(x, y, s=10)
plt.show()
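The cell that builds x and y is not visible in this export, only its preview and scatter plot above. Below is a minimal sketch of an equivalent construction; the slope of roughly 2, the unit-variance Gaussian noise, and the 1001 evenly spaced points are inferred from the preview and the later training logs, not taken from the original code. It also includes the imports (pandas, matplotlib) that the scatter call above relies on.

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt  # needed by the plt.scatter call above

# Hypothetical reconstruction of the dataset previewed in cell [24]:
# 1001 points with x in [0, 10] and y roughly 2*x plus Gaussian noise.
rng = np.random.default_rng(0)
x = pd.Series(np.linspace(0, 10, 1001), name='x')
y = pd.Series(2 * x + rng.normal(0, 1, size=len(x)), name='y')
pd.DataFrame({'x': x, 'y': y})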
[27]: ## make training and testing datasets
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.20, random_state=0)
[28]: print(x_train)
print(x_test)
print(y_train)
print(y_test)
465 4.65
399 3.99
511 5.11
818 8.18
951 9.51
...
835 8.35
192 1.92
629 6.29
559 5.59
684 6.84
Name: x, Length: 800, dtype: float64
708 7.08
533 5.33
298 2.98
356 3.56
833 8.33
...
991 9.91
215 2.15
654 6.54
150 1.50
342 3.42
Name: x, Length: 201, dtype: float64
465 11.428067
399 8.306230
511 10.100651
818 16.236373
951 18.930391
...
835 17.475608
192 2.530710
629 12.210384
559 10.978225
684 12.754473
Name: y, Length: 800, dtype: float64
708 14.329850
533 11.899805
298 5.398876
356 8.294682
833 18.148310
...
991 19.509641
215 2.819302
654 13.934812
150 4.771437
342 7.333522
Name: y, Length: 201, dtype: float64
[ ]:
# print final w and b
import numpy as np

def train_linear_regression(x, y, learning_rate=0.01, epochs=100):
    x, y, w, b = np.asarray(x, float), np.asarray(y, float), 0.0, 0.0  # Step 1: zero init (assumed)
    # Training loop
    for epoch in range(epochs):
        # Step 2: Predict y using the current w and b
        y_pred = w * x + b
        loss = np.mean((y - y_pred) ** 2)  # Step 3: mean-squared-error loss (form assumed)
        dw, db = -2 * np.mean(x * (y - y_pred)), -2 * np.mean(y - y_pred)  # gradients of the loss
        w, b = w - learning_rate * dw, b - learning_rate * db  # Step 4: gradient-descent update
        print(f"Epoch {epoch + 1}/{epochs}, Loss: {loss:.4f}, w: {w:.4f}, b: {b:.4f}")
    return w, b
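For reference (not part of the original cells), the closed-form least-squares fit on the training split gives the values the gradient-descent loop should approach. In the logs further below, w is already near 2.01 after 100 epochs, while b still creeps upward because the intercept converges more slowly at this learning rate.

import numpy as np
# Closed-form reference fit: np.polyfit returns (slope, intercept) for degree 1.
w_ls, b_ls = np.polyfit(x_train, y_train, 1)
print(f"least-squares reference: w = {w_ls:.4f}, b = {b_ls:.4f}")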
[30]: ## use final w and b to predict the y values for the test set
## plot line on training and testing dataset
w, b = train_linear_regression(x_train, y_train, learning_rate=0.01, epochs=100)
y_test_pred = w * x_test + b  # test-set predictions from the trained line (reconstructed step)

plt.figure(figsize=(10, 6))
# Plot both splits and the fitted line (these calls are reconstructed from the cell's comments)
plt.scatter(x_train, y_train, s=10, label='Training data')
plt.scatter(x_test, y_test, s=10, label='Test data')
plt.plot(x_test, y_test_pred, color='red', linewidth=2, label='Fitted line')
# Plot settings
plt.title('Linear Regression on Training and Test Data')
plt.xlabel('x-axis')
plt.ylabel('y-axis')
plt.grid(True)
plt.legend()
plt.show()
Epoch 31/100, Loss: 0.9066, w: 2.0175, b: 0.3414
Epoch 32/100, Loss: 0.9065, w: 2.0173, b: 0.3423
Epoch 33/100, Loss: 0.9064, w: 2.0172, b: 0.3433
Epoch 34/100, Loss: 0.9063, w: 2.0170, b: 0.3443
Epoch 35/100, Loss: 0.9062, w: 2.0169, b: 0.3452
Epoch 36/100, Loss: 0.9061, w: 2.0168, b: 0.3462
Epoch 37/100, Loss: 0.9061, w: 2.0166, b: 0.3472
Epoch 38/100, Loss: 0.9060, w: 2.0165, b: 0.3481
Epoch 39/100, Loss: 0.9059, w: 2.0163, b: 0.3490
Epoch 40/100, Loss: 0.9058, w: 2.0162, b: 0.3500
Epoch 41/100, Loss: 0.9057, w: 2.0160, b: 0.3509
Epoch 42/100, Loss: 0.9056, w: 2.0159, b: 0.3518
Epoch 43/100, Loss: 0.9055, w: 2.0158, b: 0.3528
Epoch 44/100, Loss: 0.9054, w: 2.0156, b: 0.3537
Epoch 45/100, Loss: 0.9053, w: 2.0155, b: 0.3546
Epoch 46/100, Loss: 0.9053, w: 2.0153, b: 0.3555
Epoch 47/100, Loss: 0.9052, w: 2.0152, b: 0.3564
Epoch 48/100, Loss: 0.9051, w: 2.0151, b: 0.3573
Epoch 49/100, Loss: 0.9050, w: 2.0149, b: 0.3582
Epoch 50/100, Loss: 0.9049, w: 2.0148, b: 0.3591
Epoch 51/100, Loss: 0.9048, w: 2.0147, b: 0.3600
Epoch 52/100, Loss: 0.9048, w: 2.0145, b: 0.3609
Epoch 53/100, Loss: 0.9047, w: 2.0144, b: 0.3617
Epoch 54/100, Loss: 0.9046, w: 2.0142, b: 0.3626
Epoch 55/100, Loss: 0.9045, w: 2.0141, b: 0.3635
Epoch 56/100, Loss: 0.9044, w: 2.0140, b: 0.3644
Epoch 57/100, Loss: 0.9044, w: 2.0139, b: 0.3652
Epoch 58/100, Loss: 0.9043, w: 2.0137, b: 0.3661
Epoch 59/100, Loss: 0.9042, w: 2.0136, b: 0.3669
Epoch 60/100, Loss: 0.9041, w: 2.0135, b: 0.3678
Epoch 61/100, Loss: 0.9041, w: 2.0133, b: 0.3686
Epoch 62/100, Loss: 0.9040, w: 2.0132, b: 0.3695
Epoch 63/100, Loss: 0.9039, w: 2.0131, b: 0.3703
Epoch 64/100, Loss: 0.9039, w: 2.0130, b: 0.3711
Epoch 65/100, Loss: 0.9038, w: 2.0128, b: 0.3719
Epoch 66/100, Loss: 0.9037, w: 2.0127, b: 0.3728
Epoch 67/100, Loss: 0.9036, w: 2.0126, b: 0.3736
Epoch 68/100, Loss: 0.9036, w: 2.0125, b: 0.3744
Epoch 69/100, Loss: 0.9035, w: 2.0123, b: 0.3752
Epoch 70/100, Loss: 0.9034, w: 2.0122, b: 0.3760
Epoch 71/100, Loss: 0.9034, w: 2.0121, b: 0.3768
Epoch 72/100, Loss: 0.9033, w: 2.0120, b: 0.3776
Epoch 73/100, Loss: 0.9032, w: 2.0118, b: 0.3784
Epoch 74/100, Loss: 0.9032, w: 2.0117, b: 0.3792
Epoch 75/100, Loss: 0.9031, w: 2.0116, b: 0.3800
Epoch 76/100, Loss: 0.9031, w: 2.0115, b: 0.3808
Epoch 77/100, Loss: 0.9030, w: 2.0114, b: 0.3815
Epoch 78/100, Loss: 0.9029, w: 2.0112, b: 0.3823
Epoch 79/100, Loss: 0.9029, w: 2.0111, b: 0.3831
Epoch 80/100, Loss: 0.9028, w: 2.0110, b: 0.3839
Epoch 81/100, Loss: 0.9028, w: 2.0109, b: 0.3846
Epoch 82/100, Loss: 0.9027, w: 2.0108, b: 0.3854
Epoch 83/100, Loss: 0.9026, w: 2.0107, b: 0.3861
Epoch 84/100, Loss: 0.9026, w: 2.0105, b: 0.3869
Epoch 85/100, Loss: 0.9025, w: 2.0104, b: 0.3876
Epoch 86/100, Loss: 0.9025, w: 2.0103, b: 0.3884
Epoch 87/100, Loss: 0.9024, w: 2.0102, b: 0.3891
Epoch 88/100, Loss: 0.9023, w: 2.0101, b: 0.3899
Epoch 89/100, Loss: 0.9023, w: 2.0100, b: 0.3906
Epoch 90/100, Loss: 0.9022, w: 2.0099, b: 0.3913
Epoch 91/100, Loss: 0.9022, w: 2.0098, b: 0.3920
Epoch 92/100, Loss: 0.9021, w: 2.0096, b: 0.3928
Epoch 93/100, Loss: 0.9021, w: 2.0095, b: 0.3935
Epoch 94/100, Loss: 0.9020, w: 2.0094, b: 0.3942
Epoch 95/100, Loss: 0.9020, w: 2.0093, b: 0.3949
Epoch 96/100, Loss: 0.9019, w: 2.0092, b: 0.3956
Epoch 97/100, Loss: 0.9019, w: 2.0091, b: 0.3963
Epoch 98/100, Loss: 0.9018, w: 2.0090, b: 0.3970
Epoch 99/100, Loss: 0.9018, w: 2.0089, b: 0.3977
Epoch 100/100, Loss: 0.9017, w: 2.0088, b: 0.3984
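A natural follow-up, not shown in the original output, is to score the fitted line on the held-out test set. A short sketch using scikit-learn's metrics (scikit-learn is already used above for the split):

from sklearn.metrics import mean_squared_error, r2_score

# Evaluate the trained line y = w*x + b on the test split.
y_test_pred = w * x_test + b
print("Test MSE:", mean_squared_error(y_test, y_test_pred))
print("Test R^2:", r2_score(y_test, y_test_pred))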