0% found this document useful (0 votes)
10 views17 pages

BA STOCK PRICE Project Neural Network Analysis

BA STOCK PRICE project neural network analysis

Uploaded by

o17847999
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
10 views17 pages

BA STOCK PRICE Project Neural Network Analysis

BA STOCK PRICE project neural network analysis

Uploaded by

o17847999
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as PDF, TXT or read online on Scribd
You are on page 1/ 17

In [61]: # Import python libraries required in this example:

from sklearn.preprocessing import MinMaxScaler

scaler = MinMaxScaler()
x_scaled = scaler.fit_transform(X) # Scale features
y_scaled = scaler.fit_transform(y.reshape(-1, 1)) # Scale target
from sklearn.preprocessing import MinMaxScaler

# Define and fit the scaler


scaler_x = MinMaxScaler()
scaler_y = MinMaxScaler()

# Scale features and target


x_scaled = scaler_x.fit_transform(x)
y_scaled = scaler_y.fit_transform(y.reshape(-1, 1))

from keras.models import Sequential


from keras.layers import Dense, Activation
from sklearn.model_selection import train_test_split
import numpy as np
import pandas as pd
# Load the CSV file
dataset = pd.read_csv("C:/Users/mroky/Downloads/SBILIFE.csv", header=0)

# Display the first few rows of the dataset


dataset.head()

Out[61]: Date Adj Close Close High Low Open Volume

03-10-
0 696.950440 707.549988 740.000000 701.700012 735.000000 35367877
2017

04-10-
1 693.108887 703.650024 710.750000 699.000000 710.150024 3613280
2017

05-10-
2 684.145203 694.549988 704.549988 686.099976 704.549988 2680742
2017

06-10-
3 686.312317 696.750000 702.000000 689.000000 693.900024 1416485
2017

09-10-
4 688.183838 698.650024 702.500000 694.099976 694.099976 951473
2017

In [45]: from keras.models import Sequential


from keras.layers import Dense, Input

# Define the Neural Network model


model = Sequential()

# Input Layer
model.add(Input(shape=(5,))) # Takes 5 input features

# Hidden Layers
model.add(Dense(64, activation='relu')) # First hidden layer with 64 neurons
model.add(Dense(32, activation='relu')) # Second hidden layer with 32 neurons

# Output Layer
model.add(Dense(1)) # Output layer for regression

# Compile the model


model.compile(optimizer='adam', loss='mse', metrics=['mae'])

# Print the summary of the model


model.summary()

Model: "sequential_4"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━
┃ Layer (type) ┃ Output Shape ┃ Param #
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━
│ dense_11 (Dense) │ (None, 64) │ 384
├─────────────────────────────────┼────────────────────────┼──────────────
│ dense_12 (Dense) │ (None, 32) │ 2,080
├─────────────────────────────────┼────────────────────────┼──────────────
│ dense_13 (Dense) │ (None, 1) │ 33
└─────────────────────────────────┴────────────────────────┴──────────────
 
Total params: 2,497 (9.75 KB)
Trainable params: 2,497 (9.75 KB)
Non-trainable params: 0 (0.00 B)

In [11]: # Select input features and target


x = np.array(dataset[['Open', 'High', 'Low', 'Close', 'Volume']]) # Features
y = np.array(dataset['Adj Close']) # Target

In [51]: from sklearn.model_selection import train_test_split

x_train, x_test, y_train, y_test = train_test_split(x_scaled, y_scaled, train_si

history = model.fit(x_train, y_train, epochs=50, batch_size=32, validation_data=

# Evaluate model
y_pred = model.predict(x_test)
from sklearn.metrics import mean_squared_error, r2_score

mse = mean_squared_error(y_test, y_pred)


r2 = r2_score(y_test, y_pred)

print(f"Mean Squared Error (MSE): {mse}")


print(f"R-Squared (R²): {r2}")
Epoch 1/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 2s 8ms/step - loss: 0.1022 - mae: 0.2340 - val_loss:
0.0021 - val_mae: 0.0390
Epoch 2/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 7.8736e-04 - mae: 0.0203 - val_los
s: 4.0259e-05 - val_mae: 0.0050
Epoch 3/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 4.4403e-05 - mae: 0.0049 - val_los
s: 3.4460e-05 - val_mae: 0.0044
Epoch 4/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 3.6779e-05 - mae: 0.0046 - val_los
s: 4.8054e-05 - val_mae: 0.0052
Epoch 5/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 4.3254e-05 - mae: 0.0048 - val_los
s: 3.6191e-05 - val_mae: 0.0045
Epoch 6/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 3.5161e-05 - mae: 0.0044 - val_los
s: 3.1964e-05 - val_mae: 0.0042
Epoch 7/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 3.1491e-05 - mae: 0.0042 - val_los
s: 3.1075e-05 - val_mae: 0.0041
Epoch 8/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 3.3932e-05 - mae: 0.0044 - val_los
s: 3.0335e-05 - val_mae: 0.0040
Epoch 9/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - loss: 3.4372e-05 - mae: 0.0044 - val_los
s: 2.9572e-05 - val_mae: 0.0040
Epoch 10/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.7555e-05 - mae: 0.0039 - val_los
s: 3.2552e-05 - val_mae: 0.0042
Epoch 11/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.9120e-05 - mae: 0.0040 - val_los
s: 2.6243e-05 - val_mae: 0.0038
Epoch 12/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.6731e-05 - mae: 0.0040 - val_los
s: 2.6182e-05 - val_mae: 0.0037
Epoch 13/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.7030e-05 - mae: 0.0039 - val_los
s: 2.6240e-05 - val_mae: 0.0037
Epoch 14/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.4393e-05 - mae: 0.0037 - val_los
s: 2.8951e-05 - val_mae: 0.0040
Epoch 15/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.7684e-05 - mae: 0.0040 - val_los
s: 3.3534e-05 - val_mae: 0.0043
Epoch 16/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.6043e-05 - mae: 0.0037 - val_los
s: 2.1555e-05 - val_mae: 0.0034
Epoch 17/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.2033e-05 - mae: 0.0035 - val_los
s: 2.5500e-05 - val_mae: 0.0037
Epoch 18/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.1712e-05 - mae: 0.0035 - val_los
s: 2.1846e-05 - val_mae: 0.0034
Epoch 19/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 2.0027e-05 - mae: 0.0034 - val_los
s: 1.9428e-05 - val_mae: 0.0032
Epoch 20/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.9016e-05 - mae: 0.0033 - val_los
s: 2.0490e-05 - val_mae: 0.0032
Epoch 21/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - loss: 1.7645e-05 - mae: 0.0031 - val_los
s: 1.7910e-05 - val_mae: 0.0030
Epoch 22/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.5322e-05 - mae: 0.0030 - val_los
s: 1.8540e-05 - val_mae: 0.0030
Epoch 23/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.7300e-05 - mae: 0.0032 - val_los
s: 1.7417e-05 - val_mae: 0.0029
Epoch 24/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.4489e-05 - mae: 0.0029 - val_los
s: 2.0142e-05 - val_mae: 0.0032
Epoch 25/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - loss: 1.2851e-05 - mae: 0.0028 - val_los
s: 1.6084e-05 - val_mae: 0.0028
Epoch 26/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.3183e-05 - mae: 0.0027 - val_los
s: 1.5766e-05 - val_mae: 0.0027
Epoch 27/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.3698e-05 - mae: 0.0028 - val_los
s: 1.3495e-05 - val_mae: 0.0026
Epoch 28/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.6337e-05 - mae: 0.0031 - val_los
s: 1.4873e-05 - val_mae: 0.0025
Epoch 29/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.0764e-05 - mae: 0.0025 - val_los
s: 1.2112e-05 - val_mae: 0.0024
Epoch 30/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.2682e-05 - mae: 0.0027 - val_los
s: 1.2195e-05 - val_mae: 0.0023
Epoch 31/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - loss: 1.1418e-05 - mae: 0.0026 - val_los
s: 1.1185e-05 - val_mae: 0.0023
Epoch 32/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 1.2174e-05 - mae: 0.0026 - val_los
s: 1.1829e-05 - val_mae: 0.0023
Epoch 33/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 7.1876e-06 - mae: 0.0020 - val_los
s: 1.1633e-05 - val_mae: 0.0023
Epoch 34/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 8.2215e-06 - mae: 0.0022 - val_los
s: 1.4666e-05 - val_mae: 0.0027
Epoch 35/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 8.8035e-06 - mae: 0.0023 - val_los
s: 9.5824e-06 - val_mae: 0.0020
Epoch 36/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 6.4789e-06 - mae: 0.0019 - val_los
s: 1.1484e-05 - val_mae: 0.0023
Epoch 37/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 8.3368e-06 - mae: 0.0022 - val_los
s: 9.0809e-06 - val_mae: 0.0020
Epoch 38/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 5.8682e-06 - mae: 0.0019 - val_los
s: 1.5408e-05 - val_mae: 0.0029
Epoch 39/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 9.7513e-06 - mae: 0.0024 - val_los
s: 8.1319e-06 - val_mae: 0.0018
Epoch 40/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 6.3249e-06 - mae: 0.0019 - val_los
s: 8.5983e-06 - val_mae: 0.0018
Epoch 41/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 5.2720e-06 - mae: 0.0018 - val_los
s: 8.7595e-06 - val_mae: 0.0019
Epoch 42/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 5.6100e-06 - mae: 0.0018 - val_los
s: 1.3256e-05 - val_mae: 0.0027
Epoch 43/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 6.6243e-06 - mae: 0.0020 - val_los
s: 8.6659e-06 - val_mae: 0.0019
Epoch 44/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - loss: 4.9930e-06 - mae: 0.0017 - val_los
s: 1.2824e-05 - val_mae: 0.0026
Epoch 45/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 6.8258e-06 - mae: 0.0020 - val_los
s: 8.9601e-06 - val_mae: 0.0019
Epoch 46/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 5.8128e-06 - mae: 0.0018 - val_los
s: 7.4705e-06 - val_mae: 0.0016
Epoch 47/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 5.6041e-06 - mae: 0.0018 - val_los
s: 7.2558e-06 - val_mae: 0.0016
Epoch 48/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 4.5891e-06 - mae: 0.0016 - val_los
s: 1.0711e-05 - val_mae: 0.0022
Epoch 49/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 4.7474e-06 - mae: 0.0017 - val_los
s: 1.1764e-05 - val_mae: 0.0025
Epoch 50/50
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - loss: 5.1460e-06 - mae: 0.0017 - val_los
s: 1.0822e-05 - val_mae: 0.0022
12/12 ━━━━━━━━━━━━━━━━━━━━ 0s 6ms/step
Mean Squared Error (MSE): 1.0822424626158424e-05
R-Squared (R²): 0.9997992917716426

In [15]: print(len(x_train))
print(len(y_train))
print(len(x_test))
print(len(y_test))
y_test

1418
1418
355
355
Out[15]: array([ 648.2655029, 1180.099243 , 1489.949951 , 1103.567749 ,
657.8944702, 846.9360962, 1562.300049 , 743.2063599,
647.2775879, 1116.467896 , 560.5886841, 801.043335 ,
862.052002 , 931.4364014, 663.6768188, 1113.72937 ,
989.9175415, 666.4147949, 1406.274048 , 644.8571777,
643.5131836, 1706.050049 , 1238.163208 , 1291.430298 ,
980.3028564, 1223.488159 , 852.6355591, 1462. ,
1120.855713 , 859.7722168, 873.1038818, 664.0508423,
1280.188721 , 944.4211426, 678.6784058, 1129.453979 ,
1160.136719 , 1249.167358 , 555.8466797, 1131.790039 ,
1116.368408 , 1347.729126 , 1875.949951 , 1155.000366 ,
1326.966431 , 1152.962646 , 1145.597046 , 662.1456299,
653.8986816, 1060.744629 , 1753.650024 , 1558.800049 ,
1840.949951 , 575.9505615, 668.0894165, 655.875061 ,
1258.328613 , 699.2652588, 908.9310303, 615.4669189,
866.2645874, 1562.849976 , 1191.630127 , 689.0210571,
881.1327515, 982.5877686, 1212.75293 , 1157.882935 ,
1182.991943 , 703.9441528, 595.5605469, 1153.608765 ,
1245.084351 , 1217.897095 , 906.1607056, 650.8834229,
867.8504639, 689.5134888, 656.3184814, 1331.807739 ,
1100.583862 , 673.6547852, 1308.272095 , 867.7018433,
1066.923462 , 1408.020874 , 1136.435181 , 1268.271973 ,
983.4252319, 1307.525269 , 867.2318726, 789.446167 ,
1212.554077 , 646.2712402, 1231.29126 , 1148.736694 ,
1260.719482 , 1506.943237 , 697.8587036, 1188.051392 ,
1505.745239 , 662.8687744, 1220.807129 , 717.3359375,
539.9907227, 1569.949951 , 991.0078735, 667.8261108,
877.3164673, 820.3717651, 1501.099976 , 1294.874023 ,
567.8991699, 890.9951782, 650.112793 , 1085.717285 ,
1888.75 , 981.1453247, 1095.305786 , 1183.274536 ,
720.4580688, 1442.849976 , 593.8811035, 755.4478149,
1289.052002 , 821.4125366, 1254.096924 , 594.921875 ,
677.7051392, 644.779541 , 1503.149902 , 542.8063354,
1281.234375 , 1089.743164 , 1494.900024 , 689.4829712,
903.5834961, 735.4254761, 1546.372314 , 1468.050049 ,
1273.516357 , 808.9728394, 961.1229858, 1264.653198 ,
864.4804688, 693.6014404, 680.9586182, 1287.387451 ,
1063.238892 , 1452.199951 , 1744.900024 , 1185.367676 ,
596.6565552, 1438.016968 , 1146.302734 , 996.156189 ,
1392.798218 , 1633.199951 , 1128.609131 , 564.0957031,
833.0592041, 615.8126831, 670.69104 , 664.543396 ,
898.7266235, 1064.147095 , 1466.815186 , 1279.889893 ,
953.1437378, 1843.699951 , 778.4437256, 1290.232422 ,
1674.5 , 656.1688232, 656.6627808, 582.680481 ,
565.8739014, 1121.385742 , 622.1382446, 612.6019897,
866.8097534, 535.4957886, 1622.150024 , 673.1776123,
1257.980835 , 828.0039673, 572.394165 , 1226.162842 ,
975.6793213, 600.4013062, 902.2949219, 1116.816406 ,
1429.532227 , 978.5123291, 1432.726563 , 570.2701416,
1152.565063 , 1166.183105 , 794.6499634, 1288.984741 ,
661.9974976, 674.0202026, 794.2039185, 707.8202515,
752.3254395, 1184.622192 , 665.3804932, 892.4301758,
1342.787964 , 1238.874634 , 822.1063843, 962.6098633,
1391.800049 , 842.7234497, 1127.963013 , 960.5205688,
814.0280762, 1526.408203 , 755.2495728, 1043.769653 ,
1449.89563 , 1179.552734 , 1435.072266 , 694.5876465,
570.319519 , 1268.238281 , 590.9668579, 553.0805054,
558.5634766, 868.6930542, 823.1967773, 556.9827271,
876.6722412, 972.2002563, 994.9633179, 556.9334106,
1317.633301 , 798.8130493, 1274.959839 , 1124.931152 ,
907.8457031, 827.2110596, 1434.199951 , 1253.848022 ,
842.2279053, 696.9504395, 612.5032349, 1123.589355 ,
1103.717407 , 1440.949951 , 1311.793701 , 646.5141602,
1184.921631 , 831.8202515, 1262.263062 , 1074.093872 ,
614.923584 , 610.8803101, 1284.093506 , 693.2565918,
1624.150024 , 1139.024292 , 966.5250854, 994.0310669,
670.8391724, 1503.349976 , 665.3069458, 1427.400024 ,
600.598877 , 1323.822021 , 869.6346436, 1015.489746 ,
1121.800171 , 773.8841553, 658.3869019, 856.3030396,
723.5309448, 671.77771 , 853.2797852, 1256.835693 ,
667.9248657, 682.7929688, 652.1813354, 1450.744141 ,
747.4685059, 704.2888184, 694.7363281, 1737.400024 ,
1232.138062 , 822.7506104, 1433.425171 , 1290.744873 ,
724.7203369, 729.4285889, 982.7808838, 1488.576172 ,
856.6003418, 1255.441406 , 1510.550049 , 677.3978271,
1170.556641 , 1154.403931 , 639.6223755, 1733.75 ,
916.8656006, 665.2575073, 764.2199097, 861.0111694,
1086.065186 , 583.8044434, 1285.964722 , 738.052002 ,
636.4038086, 1608.800049 , 784.6883545, 791.2798462,
1150.676025 , 693.8442993, 776.6595459, 1272.520508 ,
570.5665283, 989.8938599, 899.3708496, 660.2584839,
647.0306396, 613.8862915, 640.7079468, 864.3317261,
1128.459961 , 728.6852417, 840.6418457, 979.0638428,
833.8521729, 835.0911865, 1289.234253 , 1299.715454 ,
1353.917969 , 1188.84668 , 1515.228271 , 853.6762085,
624.2120361, 658.3869019, 767.6395874, 1000.877747 ,
1100.782959 , 611.2189331, 1517.67395 , 868.0983276,
571.9002075, 855.3613281, 817.001709 , 1239.607056 ,
1144.949707 , 657.0578613, 1920.150024 ])

# NOTE(review): this retrains the ALREADY-fitted `model` for 150 more epochs.
# The logged output below (an `accuracy` metric and loss ~1.1e6) does not match
# a model compiled with metrics=['mae'] and trained on [0,1]-scaled targets —
# this cell appears to have run against stale kernel state (unscaled targets
# and/or a differently-compiled model from a deleted cell). The out-of-order
# execution count (In [17] after In [51]) supports that. Confirm intent; this
# cell is likely removable for a clean Restart-&-Run-All.
In [17]: mdl = model.fit(x_train,y_train, epochs=150)


Epoch 1/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 1s 2ms/step - accuracy: 0.0000e+00 - loss: 1156745.000
0
Epoch 2/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1158945.125
0
Epoch 3/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1115420.625
0
Epoch 4/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1140450.375
0
Epoch 5/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1161765.500
0
Epoch 6/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1142074.000
0
Epoch 7/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1184028.750
0
Epoch 8/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1137340.625
0
Epoch 9/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1146930.875
0
Epoch 10/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1163203.875
0
Epoch 11/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1127927.375
0
Epoch 12/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1141430.625
0
Epoch 13/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1147881.500
0
Epoch 14/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1135175.625
0
Epoch 15/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1140118.375
0
Epoch 16/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1166206.750
0
Epoch 17/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1135278.500
0
Epoch 18/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1148524.625
0
Epoch 19/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1139532.375
0
Epoch 20/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1191808.250
0
Epoch 21/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1132333.125
0
Epoch 22/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1169667.125
0
Epoch 23/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1161312.500
0
Epoch 24/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1139735.875
0
Epoch 25/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1150285.375
0
Epoch 26/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1133829.875
0
Epoch 27/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1152347.500
0
Epoch 28/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1115652.125
0
Epoch 29/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1157168.125
0
Epoch 30/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1178652.375
0
Epoch 31/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1148324.250
0
Epoch 32/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1141595.375
0
Epoch 33/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1138365.250
0
Epoch 34/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1144043.625
0
Epoch 35/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1150024.125
0
Epoch 36/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1127969.625
0
Epoch 37/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1157671.125
0
Epoch 38/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1130719.000
0
Epoch 39/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1149299.625
0
Epoch 40/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1153905.750
0
Epoch 41/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1142460.375
0
Epoch 42/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1142494.625
0
Epoch 43/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1147169.875
0
Epoch 44/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1150138.125
0
Epoch 45/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1145374.500
0
Epoch 46/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1115124.250
0
Epoch 47/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1147224.500
0
Epoch 48/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1134572.500
0
Epoch 49/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1153608.375
0
Epoch 50/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1146074.000
0
Epoch 51/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1137931.000
0
Epoch 52/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1129200.375
0
Epoch 53/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1162319.750
0
Epoch 54/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1175249.250
0
Epoch 55/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1145511.375
0
Epoch 56/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1130121.750
0
Epoch 57/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1171826.875
0
Epoch 58/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1136539.625
0
Epoch 59/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1159832.250
0
Epoch 60/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1121376.375
0
Epoch 61/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1146397.250
0
Epoch 62/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1160408.875
0
Epoch 63/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1146383.000
0
Epoch 64/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1168519.875
0
Epoch 65/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1132016.000
0
Epoch 66/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1135068.875
0
Epoch 67/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.0000e+00 - loss: 1162024.500
0
Epoch 68/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1135022.000
0
Epoch 69/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1117794.750
0
Epoch 70/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1108597.250
0
Epoch 71/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1176514.250
0
Epoch 72/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1117999.000
0
Epoch 73/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1142419.000
0
Epoch 74/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1118402.375
0
Epoch 75/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1163048.000
0
Epoch 76/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1176142.375
0
Epoch 77/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1169735.000
0
Epoch 78/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1141455.875
0
Epoch 79/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1151991.750
0
Epoch 80/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1145233.875
0
Epoch 81/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1137013.625
0
Epoch 82/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1137346.625
0
Epoch 83/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1130149.875
0
Epoch 84/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1163400.500
0
Epoch 85/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1162552.750
0
Epoch 86/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1136935.500
0
Epoch 87/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1151708.750
0
Epoch 88/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1160728.000
0
Epoch 89/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1142228.125
0
Epoch 90/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1174468.750
0
Epoch 91/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1131242.000
0
Epoch 92/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1135613.250
0
Epoch 93/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1134944.125
0
Epoch 94/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1157665.875
0
Epoch 95/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1138299.125
0
Epoch 96/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1138603.250
0
Epoch 97/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1116711.625
0
Epoch 98/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1141609.375
0
Epoch 99/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1157318.125
0
Epoch 100/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1130722.875
0
Epoch 101/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1126237.125
0
Epoch 102/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1149312.875
0
Epoch 103/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1173804.000
0
Epoch 104/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1143746.625
0
Epoch 105/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1131436.625
0
Epoch 106/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1141822.875
0
Epoch 107/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1158962.125
0
Epoch 108/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1174430.125
0
Epoch 109/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1135355.125
0
Epoch 110/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1158709.750
0
Epoch 111/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1148061.625
0
Epoch 112/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1164921.875
0
Epoch 113/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1167503.000
0
Epoch 114/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1143632.000
0
Epoch 115/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1165936.000
0
Epoch 116/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1169294.625
0
Epoch 117/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1126916.000
0
Epoch 118/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1149425.000
0
Epoch 119/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1165421.000
0
Epoch 120/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1163210.250
0
Epoch 121/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 4ms/step - accuracy: 0.0000e+00 - loss: 1162839.750
0
Epoch 122/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1179867.750
0
Epoch 123/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1126059.500
0
Epoch 124/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1140656.375
0
Epoch 125/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1144877.000
0
Epoch 126/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1161702.750
0
Epoch 127/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1161172.375
0
Epoch 128/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1163020.625
0
Epoch 129/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1151156.000
0
Epoch 130/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1135996.125
0
Epoch 131/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1120361.250
0
Epoch 132/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1157632.375
0
Epoch 133/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1135559.125
0
Epoch 134/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1134309.375
0
Epoch 135/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1138148.000
0
Epoch 136/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1132258.125
0
Epoch 137/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1141614.250
0
Epoch 138/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1107767.000
0
Epoch 139/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1154720.250
0
Epoch 140/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1150046.750
0
Epoch 141/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1143384.125
0
Epoch 142/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1138352.000
0
Epoch 143/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1155653.875
0
Epoch 144/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1158681.875
0
Epoch 145/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1126948.500
0
Epoch 146/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1144693.375
0
Epoch 147/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1150950.750
0
Epoch 148/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 3ms/step - accuracy: 0.0000e+00 - loss: 1123072.625
0
Epoch 149/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1145065.375
0
Epoch 150/150
45/45 ━━━━━━━━━━━━━━━━━━━━ 0s 2ms/step - accuracy: 0.0000e+00 - loss: 1177632.125
0

In [73]: import matplotlib.pyplot as plt

plt.figure(figsize=(10, 6))
plt.plot(dataset['Date'], dataset['Adj Close'], label='Adj Close Price', color='
plt.title('Adjusted Close Price Over Time')
plt.xlabel('Date')
plt.ylabel('Price')
plt.legend()
plt.savefig('closeprice.png')
plt.show()
In [55]: # Training vs Validation Loss
plt.figure(figsize=(10, 6))
plt.plot(history.history['loss'], label='Training Loss', color='blue')
plt.plot(history.history['val_loss'], label='Validation Loss', color='orange')
plt.title('Model Training Performance')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()

In [63]: # Ensure y_test and y_pred are unscaled


y_test_original = scaler_y.inverse_transform(y_test.reshape(-1, 1)).flatten()
y_pred_original = scaler_y.inverse_transform(y_pred.reshape(-1, 1)).flatten()
# Residual Plot
residuals = y_test_original - y_pred_original
plt.figure(figsize=(10, 6))
plt.scatter(y_test_original, residuals, alpha=0.6, color='red')
plt.axhline(y=0, color='black', linestyle='--', linewidth=2)
plt.title('Residual Plot')
plt.xlabel('True Values')
plt.ylabel('Residuals')
plt.show()

In [65]: from sklearn.metrics import mean_squared_error

mse = mean_squared_error(y_test, y_pred)


print(f"Mean Squared Error (MSE): {mse}")

from sklearn.metrics import r2_score

r2 = r2_score(y_test, y_pred)
print(f"R-Squared (R²): {r2}")

Mean Squared Error (MSE): 1.0822424626158424e-05


R-Squared (R²): 0.9997992917716426

In [ ]:

You might also like