Face Landmark Detection Using CNN, Random Forest & XGBoost

face-landmark-detection

May 6, 2023

Importing the data from Kaggle


[1]: ! pip install -q kaggle
from google.colab import files
files.upload()
! mkdir ~/.kaggle
! cp kaggle.json ~/.kaggle/
! chmod 600 ~/.kaggle/kaggle.json

<IPython.core.display.HTML object>
Saving kaggle.json to kaggle.json

[2]: !kaggle datasets download -d drgilermo/face-images-with-marked-landmark-points

Downloading face-images-with-marked-landmark-points.zip to /content


100% 105M/105M [00:05<00:00, 23.8MB/s]
100% 105M/105M [00:05<00:00, 18.5MB/s]

[3]: !unzip face-images-with-marked-landmark-points.zip

Archive: face-images-with-marked-landmark-points.zip
inflating: face_images.npz
inflating: facial_keypoints.csv
Importing Dependencies
[4]: import numpy as np
import pandas as pd
import keras
import cv2 as cv
import xgboost as xgb
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestRegressor
from keras.layers import Conv2D, MaxPool2D, GlobalAveragePooling2D, Dense, Dropout, Input
from keras.models import Sequential, Model
from sklearn.metrics import mean_absolute_error

Loading the images
[5]: images = np.load('/content/face_images.npz')
faces = images.get(images.files[0])
faces=np.moveaxis(faces,-1,0)
faces=faces.reshape(faces.shape[0],faces.shape[1],faces.shape[1],1)

[6]: print(f"shape of the faces {faces.shape}")

shape of the faces (7049, 96, 96, 1)

[7]: df = pd.read_csv('/content/facial_keypoints.csv')

[8]: df.head()

[8]: left_eye_center_x left_eye_center_y right_eye_center_x \


0 66.033564 39.002274 30.227008
1 64.332936 34.970077 29.949277
2 65.057053 34.909642 30.903789
3 65.225739 37.261774 32.023096
4 66.725301 39.621261 32.244810

right_eye_center_y left_eye_inner_corner_x left_eye_inner_corner_y \


0 36.421678 59.582075 39.647423
1 33.448715 58.856170 35.274349
2 34.909642 59.412000 36.320968
3 37.261774 60.003339 39.127179
4 38.042032 58.565890 39.621261

left_eye_outer_corner_x left_eye_outer_corner_y right_eye_inner_corner_x \


0 73.130346 39.969997 36.356571
1 70.722723 36.187166 36.034723
2 70.984421 36.320968 37.678105
3 72.314713 38.380967 37.618643
4 72.515926 39.884466 36.982380

right_eye_inner_corner_y … nose_tip_x nose_tip_y mouth_left_corner_x \


0 37.389402 … 44.420571 57.066803 61.195308
1 34.361532 … 48.206298 55.660936 56.421447
2 36.320968 … 47.557263 53.538947 60.822947
3 38.754115 … 51.885078 54.166539 65.598887
4 39.094852 … 43.299534 64.889521 60.671411

mouth_left_corner_y mouth_right_corner_x mouth_right_corner_y \


0 79.970165 28.614496 77.388992
1 76.352000 35.122383 76.047660
2 73.014316 33.726316 72.732000
3 72.703722 37.245496 74.195478

4 77.523239 31.191755 76.997301

mouth_center_top_lip_x mouth_center_top_lip_y mouth_center_bottom_lip_x \


0 43.312602 72.935459 43.130707
1 46.684596 70.266553 45.467915
2 47.274947 70.191789 47.274947
3 50.303165 70.091687 51.561183
4 44.962748 73.707387 44.227141

mouth_center_bottom_lip_y
0 84.485774
1 85.480170
2 78.659368
3 78.268383
4 86.871166

[5 rows x 30 columns]

[9]: df.corr()

[9]: left_eye_center_x left_eye_center_y \


left_eye_center_x 1.000000 0.029908
left_eye_center_y 0.029908 1.000000
right_eye_center_x 0.274459 0.259012
right_eye_center_y -0.154728 0.346331
left_eye_inner_corner_x 0.856572 0.065749
left_eye_inner_corner_y 0.022211 0.926728
left_eye_outer_corner_x 0.879976 -0.013411
left_eye_outer_corner_y 0.062128 0.956066
right_eye_inner_corner_x 0.308649 0.101996
right_eye_inner_corner_y -0.104698 0.537101
right_eye_outer_corner_x -0.065016 0.152378
right_eye_outer_corner_y -0.075857 0.229097
left_eyebrow_inner_end_x 0.631805 0.082724
left_eyebrow_inner_end_y -0.128193 0.609392
left_eyebrow_outer_end_x 0.796638 0.057346
left_eyebrow_outer_end_y -0.071141 0.760461
right_eyebrow_inner_end_x 0.268463 0.225890
right_eyebrow_inner_end_y -0.201853 0.342458
right_eyebrow_outer_end_x -0.137146 0.194586
right_eyebrow_outer_end_y -0.190548 0.070824
nose_tip_x 0.457468 -0.166891
nose_tip_y 0.192274 0.333053
mouth_left_corner_x 0.422891 -0.340755
mouth_left_corner_y 0.284087 0.480924
mouth_right_corner_x -0.008382 -0.392434
mouth_right_corner_y 0.202842 0.219366

mouth_center_top_lip_x 0.260753 -0.425576
mouth_center_top_lip_y 0.151302 0.251472
mouth_center_bottom_lip_x 0.374314 -0.379980
mouth_center_bottom_lip_y 0.265655 0.130062

right_eye_center_x right_eye_center_y \
left_eye_center_x 0.274459 -0.154728
left_eye_center_y 0.259012 0.346331
right_eye_center_x 1.000000 0.067621
right_eye_center_y 0.067621 1.000000
left_eye_inner_corner_x 0.294550 -0.149295
left_eye_inner_corner_y 0.127367 0.531960
left_eye_outer_corner_x -0.024408 -0.144389
left_eye_outer_corner_y 0.060942 0.221829
right_eye_inner_corner_x 0.817895 0.055758
right_eye_inner_corner_y 0.090214 0.923050
right_eye_outer_corner_x 0.861858 0.078540
right_eye_outer_corner_y 0.062292 0.954285
left_eyebrow_inner_end_x 0.301658 -0.262560
left_eyebrow_inner_end_y 0.138773 0.366292
left_eyebrow_outer_end_x -0.063364 -0.240284
left_eyebrow_outer_end_y 0.083405 0.093072
right_eyebrow_inner_end_x 0.572767 -0.121474
right_eyebrow_inner_end_y 0.137839 0.600844
right_eyebrow_outer_end_x 0.776246 -0.015320
right_eyebrow_outer_end_y 0.084007 0.751725
nose_tip_x 0.402189 0.182866
nose_tip_y 0.024502 0.329233
mouth_left_corner_x 0.061124 0.312434
mouth_left_corner_y -0.172144 0.098019
mouth_right_corner_x 0.424439 0.257956
mouth_right_corner_y -0.215123 0.397645
mouth_center_top_lip_x 0.308889 0.294145
mouth_center_top_lip_y -0.109026 0.198499
mouth_center_bottom_lip_x 0.319541 0.344010
mouth_center_bottom_lip_y -0.251105 0.044597

left_eye_inner_corner_x left_eye_inner_corner_y \
left_eye_center_x 0.856572 0.022211
left_eye_center_y 0.065749 0.926728
right_eye_center_x 0.294550 0.127367
right_eye_center_y -0.149295 0.531960
left_eye_inner_corner_x 1.000000 -0.021444
left_eye_inner_corner_y -0.021444 1.000000
left_eye_outer_corner_x 0.653318 0.010531
left_eye_outer_corner_y 0.080430 0.869743
right_eye_inner_corner_x 0.267661 0.150281

right_eye_inner_corner_y -0.172852 0.710354
right_eye_outer_corner_x 0.248756 0.104186
right_eye_outer_corner_y -0.106544 0.371340
left_eyebrow_inner_end_x 0.750635 -0.011428
left_eyebrow_inner_end_y -0.152820 0.677130
left_eyebrow_outer_end_x 0.622702 0.035147
left_eyebrow_outer_end_y -0.021592 0.681814
right_eyebrow_inner_end_x 0.372169 0.180705
right_eyebrow_inner_end_y -0.258597 0.489169
right_eyebrow_outer_end_x 0.112094 0.130958
right_eyebrow_outer_end_y -0.204863 0.208522
nose_tip_x 0.387317 -0.091006
nose_tip_y -0.054712 0.426897
mouth_left_corner_x 0.287427 -0.211855
mouth_left_corner_y 0.156495 0.493657
mouth_right_corner_x 0.145968 -0.335989
mouth_right_corner_y 0.046439 0.327081
mouth_center_top_lip_x 0.322217 -0.315929
mouth_center_top_lip_y -0.083635 0.387311
mouth_center_bottom_lip_x 0.242669 -0.367069
mouth_center_bottom_lip_y 0.303769 0.308555

left_eye_outer_corner_x left_eye_outer_corner_y \
left_eye_center_x 0.879976 0.062128
left_eye_center_y -0.013411 0.956066
right_eye_center_x -0.024408 0.060942
right_eye_center_y -0.144389 0.221829
left_eye_inner_corner_x 0.653318 0.080430
left_eye_inner_corner_y 0.010531 0.869743
left_eye_outer_corner_x 1.000000 0.054156
left_eye_outer_corner_y 0.054156 1.000000
right_eye_inner_corner_x 0.316254 0.050529
right_eye_inner_corner_y -0.094245 0.373183
right_eye_outer_corner_x -0.350353 0.090892
right_eye_outer_corner_y -0.077623 0.060410
left_eyebrow_inner_end_x 0.468184 0.109101
left_eyebrow_inner_end_y -0.140902 0.522014
left_eyebrow_outer_end_x 0.868225 0.139029
left_eyebrow_outer_end_y -0.097167 0.802413
right_eyebrow_inner_end_x 0.229020 0.232601
right_eyebrow_inner_end_y -0.200114 0.188524
right_eyebrow_outer_end_x -0.336098 0.164726
right_eyebrow_outer_end_y -0.213001 -0.069495
nose_tip_x 0.179189 -0.176047
nose_tip_y 0.165361 0.222928
mouth_left_corner_x 0.507755 -0.392594
mouth_left_corner_y 0.325291 0.506364

mouth_right_corner_x -0.136411 -0.462165
mouth_right_corner_y 0.263466 0.183239
mouth_center_top_lip_x 0.229154 -0.494482
mouth_center_top_lip_y 0.292129 0.216060
mouth_center_bottom_lip_x 0.176392 -0.561581
mouth_center_bottom_lip_y 0.332880 0.329316

right_eye_inner_corner_x right_eye_inner_corner_y \
left_eye_center_x 0.308649 -0.104698
left_eye_center_y 0.101996 0.537101
right_eye_center_x 0.817895 0.090214
right_eye_center_y 0.055758 0.923050
left_eye_inner_corner_x 0.267661 -0.172852
left_eye_inner_corner_y 0.150281 0.710354
left_eye_outer_corner_x 0.316254 -0.094245
left_eye_outer_corner_y 0.050529 0.373183
right_eye_inner_corner_x 1.000000 0.124023
right_eye_inner_corner_y 0.124023 1.000000
right_eye_outer_corner_x 0.543188 0.054442
right_eye_outer_corner_y 0.074985 0.861327
left_eyebrow_inner_end_x 0.346247 -0.243664
left_eyebrow_inner_end_y 0.137977 0.515579
left_eyebrow_outer_end_x 0.189012 -0.183551
left_eyebrow_outer_end_y 0.034023 0.237008
right_eyebrow_inner_end_x 0.657995 -0.046338
right_eyebrow_inner_end_y 0.125718 0.672297
right_eyebrow_outer_end_x 0.521745 -0.003771
right_eyebrow_outer_end_y 0.068031 0.673633
nose_tip_x 0.377931 0.053212
nose_tip_y 0.118548 0.356647
mouth_left_corner_x 0.262597 0.237858
mouth_left_corner_y -0.054888 0.225398
mouth_right_corner_x 0.262056 0.108121
mouth_right_corner_y -0.094748 0.449346
mouth_center_top_lip_x 0.369890 0.176403
mouth_center_top_lip_y 0.109785 0.354693
mouth_center_bottom_lip_x 0.309453 0.203002
mouth_center_bottom_lip_y -0.230177 0.164086

… nose_tip_x nose_tip_y mouth_left_corner_x \


left_eye_center_x … 0.457468 0.192274 0.422891
left_eye_center_y … -0.166891 0.333053 -0.340755
right_eye_center_x … 0.402189 0.024502 0.061124
right_eye_center_y … 0.182866 0.329233 0.312434
left_eye_inner_corner_x … 0.387317 -0.054712 0.287427
left_eye_inner_corner_y … -0.091006 0.426897 -0.211855
left_eye_outer_corner_x … 0.179189 0.165361 0.507755

left_eye_outer_corner_y … -0.176047 0.222928 -0.392594
right_eye_inner_corner_x … 0.377931 0.118548 0.262597
right_eye_inner_corner_y … 0.053212 0.356647 0.237858
right_eye_outer_corner_x … 0.156996 -0.133285 -0.177957
right_eye_outer_corner_y … 0.171852 0.118600 0.454774
left_eyebrow_inner_end_x … 0.554900 -0.087723 0.104528
left_eyebrow_inner_end_y … -0.007749 0.477947 -0.218913
left_eyebrow_outer_end_x … 0.103122 0.130824 0.320783
left_eyebrow_outer_end_y … -0.161689 0.196461 -0.438723
right_eyebrow_inner_end_x … 0.516089 0.126098 -0.059611
right_eyebrow_inner_end_y … -0.057668 0.422549 0.060252
right_eyebrow_outer_end_x … 0.046404 -0.134561 -0.302128
right_eyebrow_outer_end_y … 0.187261 0.112216 0.374869
nose_tip_x … 1.000000 0.070132 0.339788
nose_tip_y … 0.070132 1.000000 0.035640
mouth_left_corner_x … 0.339788 0.035640 1.000000
mouth_left_corner_y … -0.099397 0.442267 -0.153760
mouth_right_corner_x … 0.308948 -0.177473 0.257016
mouth_right_corner_y … 0.040560 0.415741 0.156002
mouth_center_top_lip_x … 0.703506 -0.060059 0.722451
mouth_center_top_lip_y … -0.010470 0.733981 0.119111
mouth_center_bottom_lip_x … 0.845545 0.064277 0.737955
mouth_center_bottom_lip_y … -0.010041 0.138304 0.051944

mouth_left_corner_y mouth_right_corner_x \
left_eye_center_x 0.284087 -0.008382
left_eye_center_y 0.480924 -0.392434
right_eye_center_x -0.172144 0.424439
right_eye_center_y 0.098019 0.257956
left_eye_inner_corner_x 0.156495 0.145968
left_eye_inner_corner_y 0.493657 -0.335989
left_eye_outer_corner_x 0.325291 -0.136411
left_eye_outer_corner_y 0.506364 -0.462165
right_eye_inner_corner_x -0.054888 0.262056
right_eye_inner_corner_y 0.225398 0.108121
right_eye_outer_corner_x -0.262503 0.468401
right_eye_outer_corner_y 0.031565 0.364246
left_eyebrow_inner_end_x 0.037376 0.017408
left_eyebrow_inner_end_y 0.276619 -0.170956
left_eyebrow_outer_end_x 0.389780 -0.240134
left_eyebrow_outer_end_y 0.386743 -0.385602
right_eyebrow_inner_end_x 0.139671 0.024607
right_eyebrow_inner_end_y 0.099020 0.117008
right_eyebrow_outer_end_x -0.245991 0.315302
right_eyebrow_outer_end_y -0.066545 0.351068
nose_tip_x -0.099397 0.308948
nose_tip_y 0.442267 -0.177473

mouth_left_corner_x -0.153760 0.257016
mouth_left_corner_y 1.000000 -0.231965
mouth_right_corner_x -0.231965 1.000000
mouth_right_corner_y 0.845165 0.047169
mouth_center_top_lip_x -0.237070 0.706419
mouth_center_top_lip_y 0.726913 -0.182543
mouth_center_bottom_lip_x -0.278776 0.727747
mouth_center_bottom_lip_y 0.724428 -0.183575

mouth_right_corner_y mouth_center_top_lip_x \
left_eye_center_x 0.202842 0.260753
left_eye_center_y 0.219366 -0.425576
right_eye_center_x -0.215123 0.308889
right_eye_center_y 0.397645 0.294145
left_eye_inner_corner_x 0.046439 0.322217
left_eye_inner_corner_y 0.327081 -0.315929
left_eye_outer_corner_x 0.263466 0.229154
left_eye_outer_corner_y 0.183239 -0.494482
right_eye_inner_corner_x -0.094748 0.369890
right_eye_inner_corner_y 0.449346 0.176403
right_eye_outer_corner_x -0.323196 0.179465
right_eye_outer_corner_y 0.398740 0.451419
left_eyebrow_inner_end_x -0.124903 0.266158
left_eyebrow_inner_end_y 0.202403 -0.183568
left_eyebrow_outer_end_x 0.264717 0.058748
left_eyebrow_outer_end_y 0.100390 -0.448908
right_eyebrow_inner_end_x -0.035666 0.191600
right_eyebrow_inner_end_y 0.244345 0.058862
right_eyebrow_outer_end_x -0.383826 -0.001356
right_eyebrow_outer_end_y 0.276684 0.426359
nose_tip_x 0.040560 0.703506
nose_tip_y 0.415741 -0.060059
mouth_left_corner_x 0.156002 0.722451
mouth_left_corner_y 0.845165 -0.237070
mouth_right_corner_x 0.047169 0.706419
mouth_right_corner_y 1.000000 0.114100
mouth_center_top_lip_x 0.114100 1.000000
mouth_center_top_lip_y 0.741801 -0.021879
mouth_center_bottom_lip_x 0.119912 0.975837
mouth_center_bottom_lip_y 0.681152 -0.077797

mouth_center_top_lip_y mouth_center_bottom_lip_x \
left_eye_center_x 0.151302 0.374314
left_eye_center_y 0.251472 -0.379980
right_eye_center_x -0.109026 0.319541
right_eye_center_y 0.198499 0.344010
left_eye_inner_corner_x -0.083635 0.242669

left_eye_inner_corner_y 0.387311 -0.367069
left_eye_outer_corner_x 0.292129 0.176392
left_eye_outer_corner_y 0.216060 -0.561581
right_eye_inner_corner_x 0.109785 0.309453
right_eye_inner_corner_y 0.354693 0.203002
right_eye_outer_corner_x -0.327911 0.140704
right_eye_outer_corner_y 0.139957 0.504343
left_eyebrow_inner_end_x -0.114208 0.178256
left_eyebrow_inner_end_y 0.414661 -0.219322
left_eyebrow_outer_end_x 0.243773 0.020986
left_eyebrow_outer_end_y 0.159086 -0.493071
right_eyebrow_inner_end_x 0.081940 0.093641
right_eyebrow_inner_end_y 0.371001 0.076888
right_eyebrow_outer_end_x -0.271817 -0.042747
right_eyebrow_outer_end_y 0.085133 0.477362
nose_tip_x -0.010470 0.845545
nose_tip_y 0.733981 0.064277
mouth_left_corner_x 0.119111 0.737955
mouth_left_corner_y 0.726913 -0.278776
mouth_right_corner_x -0.182543 0.727747
mouth_right_corner_y 0.741801 0.119912
mouth_center_top_lip_x -0.021879 0.975837
mouth_center_top_lip_y 1.000000 -0.047274
mouth_center_bottom_lip_x -0.047274 1.000000
mouth_center_bottom_lip_y 0.410089 -0.034845

mouth_center_bottom_lip_y
left_eye_center_x 0.265655
left_eye_center_y 0.130062
right_eye_center_x -0.251105
right_eye_center_y 0.044597
left_eye_inner_corner_x 0.303769
left_eye_inner_corner_y 0.308555
left_eye_outer_corner_x 0.332880
left_eye_outer_corner_y 0.329316
right_eye_inner_corner_x -0.230177
right_eye_inner_corner_y 0.164086
right_eye_outer_corner_x -0.277736
right_eye_outer_corner_y 0.090795
left_eyebrow_inner_end_x 0.079219
left_eyebrow_inner_end_y 0.103497
left_eyebrow_outer_end_x 0.419651
left_eyebrow_outer_end_y 0.234645
right_eyebrow_inner_end_x -0.016916
right_eyebrow_inner_end_y -0.019619
right_eyebrow_outer_end_x -0.368392
right_eyebrow_outer_end_y 0.018289

nose_tip_x -0.010041
nose_tip_y 0.138304
mouth_left_corner_x 0.051944
mouth_left_corner_y 0.724428
mouth_right_corner_x -0.183575
mouth_right_corner_y 0.681152
mouth_center_top_lip_x -0.077797
mouth_center_top_lip_y 0.410089
mouth_center_bottom_lip_x -0.034845
mouth_center_bottom_lip_y 1.000000

[30 rows x 30 columns]

[10]: df.isnull().sum()

[10]: left_eye_center_x 10
left_eye_center_y 10
right_eye_center_x 13
right_eye_center_y 13
left_eye_inner_corner_x 4778
left_eye_inner_corner_y 4778
left_eye_outer_corner_x 4782
left_eye_outer_corner_y 4782
right_eye_inner_corner_x 4781
right_eye_inner_corner_y 4781
right_eye_outer_corner_x 4781
right_eye_outer_corner_y 4781
left_eyebrow_inner_end_x 4779
left_eyebrow_inner_end_y 4779
left_eyebrow_outer_end_x 4824
left_eyebrow_outer_end_y 4824
right_eyebrow_inner_end_x 4779
right_eyebrow_inner_end_y 4779
right_eyebrow_outer_end_x 4813
right_eyebrow_outer_end_y 4813
nose_tip_x 0
nose_tip_y 0
mouth_left_corner_x 4780
mouth_left_corner_y 4780
mouth_right_corner_x 4779
mouth_right_corner_y 4779
mouth_center_top_lip_x 4774
mouth_center_top_lip_y 4774
mouth_center_bottom_lip_x 33
mouth_center_bottom_lip_y 33
dtype: int64

[11]: def missing_values(df, column): # handling the missing values with mean
    df[column] = df[column].fillna(df[column].mean())

[12]: for i in df.columns:
    missing_values(df, i)

[13]: df.isnull().sum()

[13]: left_eye_center_x 0
left_eye_center_y 0
right_eye_center_x 0
right_eye_center_y 0
left_eye_inner_corner_x 0
left_eye_inner_corner_y 0
left_eye_outer_corner_x 0
left_eye_outer_corner_y 0
right_eye_inner_corner_x 0
right_eye_inner_corner_y 0
right_eye_outer_corner_x 0
right_eye_outer_corner_y 0
left_eyebrow_inner_end_x 0
left_eyebrow_inner_end_y 0
left_eyebrow_outer_end_x 0
left_eyebrow_outer_end_y 0
right_eyebrow_inner_end_x 0
right_eyebrow_inner_end_y 0
right_eyebrow_outer_end_x 0
right_eyebrow_outer_end_y 0
nose_tip_x 0
nose_tip_y 0
mouth_left_corner_x 0
mouth_left_corner_y 0
mouth_right_corner_x 0
mouth_right_corner_y 0
mouth_center_top_lip_x 0
mouth_center_top_lip_y 0
mouth_center_bottom_lip_x 0
mouth_center_bottom_lip_y 0
dtype: int64
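The imputation above fills every gap with the column mean; as cell [10] showed, most landmark columns were missing for roughly 4,800 of the 7,049 rows, so a large share of the targets now carry a single average position. A minimal alternative sketch of the same column-wise mean imputation with scikit-learn's SimpleImputer (not used in the original notebook):

from sklearn.impute import SimpleImputer

# Sketch: equivalent to cells [11]-[12], filling each column with its mean.
imputer = SimpleImputer(strategy="mean")
df_imputed = pd.DataFrame(imputer.fit_transform(df), columns=df.columns)
assert df_imputed.isnull().sum().sum() == 0  # no missing values remain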

[14]: face_points = []  # appending the landmarks into face_points list
for i in range(df.shape[0]):
    face_points.append(list(df.iloc[i, :]))

[15]: face_points = np.array(face_points)  # converting the face points into a numpy array

[16]: plt.figure(figsize=(10, 5))
for i in range(25):
    plt.subplot(5, 5, i + 1)
    plt.imshow(faces[i], cmap='gray')
    plt.axis("off")
plt.show()

[17]: def plot_points(img_number):
    points = list(df.iloc[img_number, :])
    plt.imshow(faces[img_number], cmap='gray')
    for i in range(0, len(points), 2):
        plt.scatter(points[i:i+2][0], points[i:i+2][1], c='y')
    plt.show()

plot_points(799)

[18]: faces = faces/255 # normalising the faces

[19]: x_train, x_test, y_train, y_test = train_test_split(faces, face_points, test_size=0.2, random_state=44)

[20]: print(f"the shape of x_train is {x_train.shape}")


print(f"the shape of y_train is {y_train.shape}")
print(f"the shape of x_test is {x_test.shape}")
print(f"the shape of y_test is {y_test.shape}")

the shape of x_train is (5639, 96, 96, 1)
the shape of y_train is (5639, 30)
the shape of x_test is (1410, 96, 96, 1)
the shape of y_test is (1410, 30)
CNN Model
[21]: model = Sequential()

model.add(Conv2D(32, (3, 3), input_shape=(96, 96, 1), activation="relu", padding="same"))
model.add(MaxPool2D(2, 2))
model.add(Dropout(0.1))
model.add(Conv2D(64, (3, 3), activation="relu", padding="same"))
model.add(MaxPool2D(2, 2))
model.add(Dropout(0.2))
model.add(Conv2D(128, (3, 3), activation="relu", padding="same"))
model.add(MaxPool2D(2, 2))
model.add(Dropout(0.3))
model.add(Conv2D(256, (3, 3), activation="relu", padding="same"))
model.add(MaxPool2D(2, 2))
model.add(Dropout(0.3))
model.add(GlobalAveragePooling2D())
model.add(Dense(128, activation="relu"))
model.add(Dense(30, activation="leaky_relu"))

[22]: model.summary()

Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 96, 96, 32) 320

max_pooling2d (MaxPooling2D (None, 48, 48, 32) 0


)

dropout (Dropout) (None, 48, 48, 32) 0

conv2d_1 (Conv2D) (None, 48, 48, 64) 18496

max_pooling2d_1 (MaxPooling (None, 24, 24, 64) 0


2D)

dropout_1 (Dropout) (None, 24, 24, 64) 0

conv2d_2 (Conv2D) (None, 24, 24, 128) 73856

max_pooling2d_2 (MaxPooling (None, 12, 12, 128) 0


2D)

dropout_2 (Dropout) (None, 12, 12, 128) 0

conv2d_3 (Conv2D) (None, 12, 12, 256) 295168

max_pooling2d_3 (MaxPooling (None, 6, 6, 256) 0


2D)

dropout_3 (Dropout) (None, 6, 6, 256) 0

14
global_average_pooling2d (G (None, 256) 0
lobalAveragePooling2D)

dense (Dense) (None, 128) 32896

dense_1 (Dense) (None, 30) 3870

=================================================================
Total params: 424,606
Trainable params: 424,606
Non-trainable params: 0
_________________________________________________________________

[23]: model.compile(optimizer="adam", loss="mse", metrics=['mae'])
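The validation loss in the run below fluctuates heavily from epoch to epoch. An optional refinement, not part of the original run, is to monitor it with a Keras callback; a minimal sketch:

from keras.callbacks import EarlyStopping

# Sketch (assumption: not used in the original run): stop once val_loss stops
# improving and restore the best weights seen so far.
early_stop = EarlyStopping(monitor="val_loss", patience=10, restore_best_weights=True)
# history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
#                     epochs=100, callbacks=[early_stop])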

[24]: history = model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=100)

Epoch 1/100
177/177 [==============================] - 15s 25ms/step - loss: 247.6686 - mae:
8.0113 - val_loss: 83.7798 - val_mae: 8.1261
Epoch 2/100
177/177 [==============================] - 3s 17ms/step - loss: 10.6864 - mae:
2.1842 - val_loss: 113.7821 - val_mae: 9.6577
Epoch 3/100
177/177 [==============================] - 3s 16ms/step - loss: 9.2263 - mae:
1.9392 - val_loss: 110.1818 - val_mae: 9.4742
Epoch 4/100
177/177 [==============================] - 3s 16ms/step - loss: 9.3054 - mae:
1.9498 - val_loss: 98.8887 - val_mae: 8.9893
Epoch 5/100
177/177 [==============================] - 3s 16ms/step - loss: 9.6848 - mae:
2.0189 - val_loss: 103.1080 - val_mae: 9.1244
Epoch 6/100
177/177 [==============================] - 3s 16ms/step - loss: 8.8998 - mae:
1.8674 - val_loss: 159.7516 - val_mae: 11.5458
Epoch 7/100
177/177 [==============================] - 3s 17ms/step - loss: 9.4427 - mae:
1.9738 - val_loss: 111.6284 - val_mae: 9.5531
Epoch 8/100
177/177 [==============================] - 3s 17ms/step - loss: 9.4034 - mae:
1.9845 - val_loss: 97.8728 - val_mae: 8.9150
Epoch 9/100
177/177 [==============================] - 3s 16ms/step - loss: 9.5542 - mae:
1.9968 - val_loss: 133.2152 - val_mae: 10.4973
Epoch 10/100
177/177 [==============================] - 3s 16ms/step - loss: 9.3016 - mae:
1.9448 - val_loss: 100.8673 - val_mae: 9.1203

Epoch 11/100
177/177 [==============================] - 3s 17ms/step - loss: 9.5358 - mae:
1.9911 - val_loss: 89.7404 - val_mae: 8.5451
Epoch 12/100
177/177 [==============================] - 3s 16ms/step - loss: 9.2227 - mae:
1.9397 - val_loss: 106.7844 - val_mae: 9.3419
Epoch 13/100
177/177 [==============================] - 3s 16ms/step - loss: 8.8619 - mae:
1.8504 - val_loss: 102.1288 - val_mae: 9.1179
Epoch 14/100
177/177 [==============================] - 3s 16ms/step - loss: 9.2572 - mae:
1.9336 - val_loss: 106.6310 - val_mae: 9.3546
Epoch 15/100
177/177 [==============================] - 3s 17ms/step - loss: 9.3389 - mae:
1.9620 - val_loss: 82.9103 - val_mae: 8.1393
Epoch 16/100
177/177 [==============================] - 3s 17ms/step - loss: 9.1877 - mae:
1.9333 - val_loss: 90.3143 - val_mae: 8.5493
Epoch 17/100
177/177 [==============================] - 3s 16ms/step - loss: 9.1932 - mae:
1.9149 - val_loss: 114.8879 - val_mae: 9.7004
Epoch 18/100
177/177 [==============================] - 3s 16ms/step - loss: 9.1223 - mae:
1.9126 - val_loss: 84.8320 - val_mae: 8.2596
Epoch 19/100
177/177 [==============================] - 3s 17ms/step - loss: 9.2219 - mae:
1.9320 - val_loss: 103.1709 - val_mae: 9.1269
Epoch 20/100
177/177 [==============================] - 3s 16ms/step - loss: 8.8229 - mae:
1.8510 - val_loss: 101.4413 - val_mae: 9.1014
Epoch 21/100
177/177 [==============================] - 3s 17ms/step - loss: 8.6790 - mae:
1.8219 - val_loss: 65.5756 - val_mae: 7.2342
Epoch 22/100
177/177 [==============================] - 3s 17ms/step - loss: 9.1055 - mae:
1.9128 - val_loss: 79.3852 - val_mae: 7.9963
Epoch 23/100
177/177 [==============================] - 3s 17ms/step - loss: 8.9796 - mae:
1.8918 - val_loss: 77.5703 - val_mae: 7.9403
Epoch 24/100
177/177 [==============================] - 3s 17ms/step - loss: 8.7981 - mae:
1.8426 - val_loss: 73.4912 - val_mae: 7.6473
Epoch 25/100
177/177 [==============================] - 3s 17ms/step - loss: 8.8405 - mae:
1.8562 - val_loss: 95.9177 - val_mae: 8.8139
Epoch 26/100
177/177 [==============================] - 3s 17ms/step - loss: 8.7959 - mae:
1.8459 - val_loss: 101.8479 - val_mae: 9.1248

Epoch 27/100
177/177 [==============================] - 3s 17ms/step - loss: 8.9323 - mae:
1.8700 - val_loss: 53.6439 - val_mae: 6.5124
Epoch 28/100
177/177 [==============================] - 3s 17ms/step - loss: 8.7616 - mae:
1.8328 - val_loss: 67.9234 - val_mae: 7.3416
Epoch 29/100
177/177 [==============================] - 3s 16ms/step - loss: 9.0021 - mae:
1.8954 - val_loss: 97.4173 - val_mae: 8.8958
Epoch 30/100
177/177 [==============================] - 3s 16ms/step - loss: 8.9739 - mae:
1.8845 - val_loss: 74.2736 - val_mae: 7.6851
Epoch 31/100
177/177 [==============================] - 3s 17ms/step - loss: 8.7079 - mae:
1.8233 - val_loss: 37.1465 - val_mae: 5.2425
Epoch 32/100
177/177 [==============================] - 3s 17ms/step - loss: 8.8279 - mae:
1.8487 - val_loss: 52.3115 - val_mae: 6.3940
Epoch 33/100
177/177 [==============================] - 3s 16ms/step - loss: 8.7103 - mae:
1.8222 - val_loss: 69.0528 - val_mae: 7.4193
Epoch 34/100
177/177 [==============================] - 3s 17ms/step - loss: 8.7592 - mae:
1.8341 - val_loss: 82.6843 - val_mae: 8.1918
Epoch 35/100
177/177 [==============================] - 3s 17ms/step - loss: 8.5312 - mae:
1.7767 - val_loss: 69.9608 - val_mae: 7.4930
Epoch 36/100
177/177 [==============================] - 3s 17ms/step - loss: 8.5891 - mae:
1.8035 - val_loss: 61.9837 - val_mae: 7.0418
Epoch 37/100
177/177 [==============================] - 3s 16ms/step - loss: 8.7157 - mae:
1.8235 - val_loss: 78.1996 - val_mae: 7.9179
Epoch 38/100
177/177 [==============================] - 3s 17ms/step - loss: 8.7549 - mae:
1.8291 - val_loss: 66.0679 - val_mae: 7.2954
Epoch 39/100
177/177 [==============================] - 3s 16ms/step - loss: 8.7454 - mae:
1.8421 - val_loss: 65.0089 - val_mae: 7.1298
Epoch 40/100
177/177 [==============================] - 3s 17ms/step - loss: 8.7682 - mae:
1.8444 - val_loss: 61.1667 - val_mae: 6.9679
Epoch 41/100
177/177 [==============================] - 3s 16ms/step - loss: 8.4494 - mae:
1.7594 - val_loss: 54.8232 - val_mae: 6.5255
Epoch 42/100
177/177 [==============================] - 3s 16ms/step - loss: 9.3137 - mae:
1.9588 - val_loss: 44.4323 - val_mae: 5.8153

Epoch 43/100
177/177 [==============================] - 3s 16ms/step - loss: 8.9189 - mae:
1.8762 - val_loss: 54.5603 - val_mae: 6.5240
Epoch 44/100
177/177 [==============================] - 3s 17ms/step - loss: 8.6738 - mae:
1.8216 - val_loss: 72.8856 - val_mae: 7.6386
Epoch 45/100
177/177 [==============================] - 3s 16ms/step - loss: 8.5333 - mae:
1.7914 - val_loss: 62.3959 - val_mae: 6.9697
Epoch 46/100
177/177 [==============================] - 3s 16ms/step - loss: 8.7475 - mae:
1.8180 - val_loss: 47.0339 - val_mae: 6.0760
Epoch 47/100
177/177 [==============================] - 3s 17ms/step - loss: 8.5913 - mae:
1.8032 - val_loss: 47.9425 - val_mae: 6.1126
Epoch 48/100
177/177 [==============================] - 3s 16ms/step - loss: 8.4712 - mae:
1.7779 - val_loss: 56.3800 - val_mae: 6.6307
Epoch 49/100
177/177 [==============================] - 3s 17ms/step - loss: 8.4064 - mae:
1.7641 - val_loss: 53.0067 - val_mae: 6.4272
Epoch 50/100
177/177 [==============================] - 3s 16ms/step - loss: 8.4607 - mae:
1.7763 - val_loss: 63.2502 - val_mae: 7.0850
Epoch 51/100
177/177 [==============================] - 3s 17ms/step - loss: 8.5722 - mae:
1.8009 - val_loss: 52.5184 - val_mae: 6.4305
Epoch 52/100
177/177 [==============================] - 3s 16ms/step - loss: 8.3858 - mae:
1.7621 - val_loss: 53.6307 - val_mae: 6.5158
Epoch 53/100
177/177 [==============================] - 3s 17ms/step - loss: 8.2413 - mae:
1.7310 - val_loss: 53.4798 - val_mae: 6.4642
Epoch 54/100
177/177 [==============================] - 3s 16ms/step - loss: 8.5102 - mae:
1.7841 - val_loss: 44.5038 - val_mae: 5.8885
Epoch 55/100
177/177 [==============================] - 3s 16ms/step - loss: 8.6417 - mae:
1.8303 - val_loss: 52.5037 - val_mae: 6.4289
Epoch 56/100
177/177 [==============================] - 3s 16ms/step - loss: 8.4937 - mae:
1.7833 - val_loss: 52.7772 - val_mae: 6.4411
Epoch 57/100
177/177 [==============================] - 3s 17ms/step - loss: 8.2668 - mae:
1.7316 - val_loss: 32.1862 - val_mae: 4.8185
Epoch 58/100
177/177 [==============================] - 3s 16ms/step - loss: 8.7145 - mae:
1.8418 - val_loss: 26.5276 - val_mae: 4.3258

Epoch 59/100
177/177 [==============================] - 3s 16ms/step - loss: 8.2396 - mae:
1.7212 - val_loss: 35.2654 - val_mae: 5.1483
Epoch 60/100
177/177 [==============================] - 3s 16ms/step - loss: 8.4176 - mae:
1.7674 - val_loss: 43.3570 - val_mae: 5.7617
Epoch 61/100
177/177 [==============================] - 3s 17ms/step - loss: 8.5178 - mae:
1.7926 - val_loss: 42.6054 - val_mae: 5.7153
Epoch 62/100
177/177 [==============================] - 3s 16ms/step - loss: 8.3288 - mae:
1.7395 - val_loss: 33.5896 - val_mae: 5.0125
Epoch 63/100
177/177 [==============================] - 3s 16ms/step - loss: 8.1065 - mae:
1.6984 - val_loss: 20.0270 - val_mae: 3.5657
Epoch 64/100
177/177 [==============================] - 3s 16ms/step - loss: 8.3581 - mae:
1.7635 - val_loss: 22.6849 - val_mae: 3.9956
Epoch 65/100
177/177 [==============================] - 3s 16ms/step - loss: 8.0364 - mae:
1.6897 - val_loss: 26.7727 - val_mae: 4.3404
Epoch 66/100
177/177 [==============================] - 3s 16ms/step - loss: 8.1213 - mae:
1.7080 - val_loss: 21.6563 - val_mae: 3.8046
Epoch 67/100
177/177 [==============================] - 3s 15ms/step - loss: 8.2241 - mae:
1.7429 - val_loss: 16.0141 - val_mae: 3.2331
Epoch 68/100
177/177 [==============================] - 3s 16ms/step - loss: 8.0199 - mae:
1.6879 - val_loss: 42.4361 - val_mae: 5.7043
Epoch 69/100
177/177 [==============================] - 3s 15ms/step - loss: 8.3339 - mae:
1.7552 - val_loss: 23.3266 - val_mae: 4.0400
Epoch 70/100
177/177 [==============================] - 3s 17ms/step - loss: 8.1687 - mae:
1.7268 - val_loss: 16.2006 - val_mae: 3.2437
Epoch 71/100
177/177 [==============================] - 3s 16ms/step - loss: 8.1271 - mae:
1.7238 - val_loss: 15.0111 - val_mae: 2.9976
Epoch 72/100
177/177 [==============================] - 3s 16ms/step - loss: 8.0443 - mae:
1.7159 - val_loss: 12.3291 - val_mae: 2.6578
Epoch 73/100
177/177 [==============================] - 3s 16ms/step - loss: 8.0654 - mae:
1.7158 - val_loss: 24.9140 - val_mae: 4.2030
Epoch 74/100
177/177 [==============================] - 3s 17ms/step - loss: 8.0045 - mae:
1.7041 - val_loss: 16.1755 - val_mae: 3.2555

Epoch 75/100
177/177 [==============================] - 3s 15ms/step - loss: 7.8202 - mae:
1.6691 - val_loss: 20.4917 - val_mae: 3.7420
Epoch 76/100
177/177 [==============================] - 3s 15ms/step - loss: 8.0126 - mae:
1.7227 - val_loss: 19.7065 - val_mae: 3.6758
Epoch 77/100
177/177 [==============================] - 3s 15ms/step - loss: 7.8310 - mae:
1.6813 - val_loss: 15.4774 - val_mae: 3.1098
Epoch 78/100
177/177 [==============================] - 3s 17ms/step - loss: 7.7742 - mae:
1.6670 - val_loss: 16.0787 - val_mae: 3.1778
Epoch 79/100
177/177 [==============================] - 3s 17ms/step - loss: 7.8609 - mae:
1.6908 - val_loss: 21.0233 - val_mae: 3.7739
Epoch 80/100
177/177 [==============================] - 3s 15ms/step - loss: 7.8045 - mae:
1.6851 - val_loss: 17.0281 - val_mae: 3.3428
Epoch 81/100
177/177 [==============================] - 3s 15ms/step - loss: 7.8090 - mae:
1.7006 - val_loss: 13.2803 - val_mae: 2.8194
Epoch 82/100
177/177 [==============================] - 3s 16ms/step - loss: 8.0213 - mae:
1.7467 - val_loss: 11.8131 - val_mae: 2.6356
Epoch 83/100
177/177 [==============================] - 3s 17ms/step - loss: 7.7927 - mae:
1.7003 - val_loss: 12.2382 - val_mae: 2.6739
Epoch 84/100
177/177 [==============================] - 3s 16ms/step - loss: 7.6791 - mae:
1.6584 - val_loss: 15.3685 - val_mae: 3.1307
Epoch 85/100
177/177 [==============================] - 3s 16ms/step - loss: 7.6913 - mae:
1.6834 - val_loss: 16.1119 - val_mae: 3.2295
Epoch 86/100
177/177 [==============================] - 3s 15ms/step - loss: 7.5466 - mae:
1.6398 - val_loss: 10.7598 - val_mae: 2.4090
Epoch 87/100
177/177 [==============================] - 3s 16ms/step - loss: 7.6180 - mae:
1.6670 - val_loss: 13.5462 - val_mae: 2.8233
Epoch 88/100
177/177 [==============================] - 3s 15ms/step - loss: 7.7813 - mae:
1.7066 - val_loss: 18.1851 - val_mae: 3.5273
Epoch 89/100
177/177 [==============================] - 3s 15ms/step - loss: 7.6687 - mae:
1.6926 - val_loss: 18.3210 - val_mae: 3.5165
Epoch 90/100
177/177 [==============================] - 3s 16ms/step - loss: 7.7163 - mae:
1.6930 - val_loss: 13.8138 - val_mae: 2.9126

Epoch 91/100
177/177 [==============================] - 3s 17ms/step - loss: 7.3981 - mae:
1.6206 - val_loss: 8.9744 - val_mae: 2.1093
Epoch 92/100
177/177 [==============================] - 3s 17ms/step - loss: 7.5680 - mae:
1.6752 - val_loss: 19.7709 - val_mae: 3.6264
Epoch 93/100
177/177 [==============================] - 3s 15ms/step - loss: 7.6256 - mae:
1.6861 - val_loss: 13.3184 - val_mae: 2.8322
Epoch 94/100
177/177 [==============================] - 3s 16ms/step - loss: 7.3932 - mae:
1.6445 - val_loss: 10.0174 - val_mae: 2.2282
Epoch 95/100
177/177 [==============================] - 3s 17ms/step - loss: 7.3429 - mae:
1.6242 - val_loss: 11.8309 - val_mae: 2.6271
Epoch 96/100
177/177 [==============================] - 3s 16ms/step - loss: 7.5236 - mae:
1.6728 - val_loss: 12.2052 - val_mae: 2.6475
Epoch 97/100
177/177 [==============================] - 3s 16ms/step - loss: 7.4714 - mae:
1.6601 - val_loss: 7.3549 - val_mae: 1.6120
Epoch 98/100
177/177 [==============================] - 3s 16ms/step - loss: 7.4413 - mae:
1.6529 - val_loss: 8.3854 - val_mae: 1.9461
Epoch 99/100
177/177 [==============================] - 3s 16ms/step - loss: 7.4853 - mae:
1.6658 - val_loss: 11.0050 - val_mae: 2.4938
Epoch 100/100
177/177 [==============================] - 3s 17ms/step - loss: 7.2580 - mae:
1.6183 - val_loss: 11.8805 - val_mae: 2.6062

[25]: y_pred = model.predict(x_test)

45/45 [==============================] - 0s 4ms/step

[26]: print(f"mean absolute error of the model is {mean_absolute_error(y_pred,␣


↪y_test)}")

mean absolute error of the model is 2.606207433861229
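The history object returned by model.fit in cell [24] is not plotted in the notebook; a minimal sketch for visualizing the training and validation loss curves:

# Sketch: loss curves from the training run in cell [24].
plt.plot(history.history['loss'], label='train loss (mse)')
plt.plot(history.history['val_loss'], label='val loss (mse)')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.show()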

[27]: def plot_points(img_number):
    plt.imshow(x_test[img_number], cmap='gray')
    predicted_points = model.predict(x_test[img_number].reshape(1, 96, 96, 1))
    predicted_points = predicted_points[0]
    original_points = y_test[img_number]
    for i in range(0, 30, 2):
        plt.scatter(original_points[i:i+2][0], original_points[i:i+2][1], c='y')   # original
        plt.scatter(predicted_points[i:i+2][0], predicted_points[i:i+2][1], c='r') # predicted
    plt.show()

plot_points(555)

1/1 [==============================] - 0s 139ms/step

CNN Model as Feature Extractor + Random Forest for Regression


[29]: features_model = Model(inputs=[model.input], outputs=[model.get_layer("global_average_pooling2d").output])
# creating a model from the CNN above, up to the GlobalAveragePooling2D layer output

[30]: train_features = features_model.predict(x_train)  # predicting the features on x_train

177/177 [==============================] - 1s 3ms/step
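As a quick sanity check (not in the original notebook), each face should now be represented by a 256-dimensional feature vector, matching the GlobalAveragePooling2D output in the model summary:

print(train_features.shape)  # expected: (5639, 256)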

[31]: rf_reg = RandomForestRegressor()

[32]: rf_reg.fit(train_features, y_train)  # fitting random forest on train_features and y_train

[32]: RandomForestRegressor()

[33]: test_features = features_model.predict(x_test) # predicting the test features

45/45 [==============================] - 0s 3ms/step

[34]: output_points = rf_reg.predict(test_features) # predicting the output features

[35]: print(f"the mean abosulute error for random forest regressor is␣
↪{mean_absolute_error(output_points, y_test)}")

the mean abosulute error for random forest regressor is 1.2129046985875478
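The single MAE above averages over all 30 coordinates; a short sketch, not in the original notebook, for inspecting the error per landmark coordinate:

# Sketch: mean absolute error for each of the 30 landmark coordinates.
per_coord_mae = np.mean(np.abs(output_points - y_test), axis=0)
for name, err in zip(df.columns, per_coord_mae):
    print(f"{name}: {err:.3f}")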

[36]: def plot_output(img_number):
    predicted_points = output_points[img_number]
    original_points = y_test[img_number]
    plt.subplot(1, 2, 1)
    plt.imshow(x_test[img_number], cmap='gray')
    for i in range(0, 30, 2):
        plt.scatter(original_points[i:i+2][0], original_points[i:i+2][1], c='y')
    plt.subplot(1, 2, 2)
    plt.imshow(x_test[img_number], cmap='gray')
    for i in range(0, 30, 2):
        plt.scatter(predicted_points[i:i+2][0], predicted_points[i:i+2][1], c='r')
    plt.show()

plot_output(555)

XGBoost as the Regressor


[37]: xgb_reg = xgb.XGBRegressor()

[38]: xgb_reg.fit(train_features, y_train)  # training xgboost on train features

[38]: XGBRegressor(base_score=None, booster=None, callbacks=None,
             colsample_bylevel=None, colsample_bynode=None,
             colsample_bytree=None, early_stopping_rounds=None,
             enable_categorical=False, eval_metric=None, feature_types=None,
             gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
             interaction_constraints=None, learning_rate=None, max_bin=None,
             max_cat_threshold=None, max_cat_to_onehot=None,
             max_delta_step=None, max_depth=None, max_leaves=None,
             min_child_weight=None, missing=nan, monotone_constraints=None,
             n_estimators=100, n_jobs=None, num_parallel_tree=None,
             predictor=None, random_state=None, …)

[40]: y_pred_xgb = xgb_reg.predict(test_features)

[41]: print(f"the mean abosulute error for xgboost regressor is␣


↪{mean_absolute_error(y_pred_xgb, y_test)}")

the mean abosulute error for xgboost regressor is 1.331619245343443

[42]: def plot_output(img_number):
    predicted_points = y_pred_xgb[img_number]
    original_points = y_test[img_number]
    plt.subplot(1, 2, 1)
    plt.imshow(x_test[img_number], cmap='gray')
    for i in range(0, 30, 2):
        plt.scatter(original_points[i:i+2][0], original_points[i:i+2][1], c='y')   # original
    plt.subplot(1, 2, 2)
    plt.imshow(x_test[img_number], cmap='gray')
    for i in range(0, 30, 2):
        plt.scatter(predicted_points[i:i+2][0], predicted_points[i:i+2][1], c='r') # predicted
    plt.show()

plot_output(555)
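To reuse the trained models outside this notebook, they can be persisted; a hedged sketch (the file names are illustrative assumptions, not from the original notebook):

import joblib

model.save("cnn_landmarks.h5")              # Keras CNN
joblib.dump(rf_reg, "rf_landmarks.joblib")  # Random Forest on CNN features
xgb_reg.save_model("xgb_landmarks.json")    # XGBoost on CNN features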

Comparing the 3 Models
[56]: def compare(img_number):  # plotting the comparisons
    y_pred_cnn = y_pred[img_number]
    y_pred_rf = output_points[img_number]
    y_pred_Xgb = y_pred_xgb[img_number]
    y_original = y_test[img_number]

    plt.figure(figsize=(15, 10))
    plt.subplot(1, 4, 1)
    plt.imshow(x_test[img_number], cmap='gray')
    plt.title("Original Landmarks")
    plt.axis("off")
    for i in range(0, 30, 2):
        plt.scatter(y_original[i:i+2][0], y_original[i:i+2][1], c='y')

    plt.subplot(1, 4, 2)
    plt.imshow(x_test[img_number], cmap='gray')
    plt.title("CNN prediction")
    plt.axis("off")
    for i in range(0, 30, 2):
        plt.scatter(y_pred_cnn[i:i+2][0], y_pred_cnn[i:i+2][1], c='y')

    plt.subplot(1, 4, 3)
    plt.imshow(x_test[img_number], cmap='gray')
    plt.title("Random Forest prediction")
    plt.axis("off")
    for i in range(0, 30, 2):
        plt.scatter(y_pred_rf[i:i+2][0], y_pred_rf[i:i+2][1], c='y')

    plt.subplot(1, 4, 4)
    plt.imshow(x_test[img_number], cmap='gray')
    plt.title("XGBoost prediction")
    plt.axis("off")
    for i in range(0, 30, 2):
        plt.scatter(y_pred_Xgb[i:i+2][0], y_pred_Xgb[i:i+2][1], c='y')
    plt.show()

compare(23)
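Alongside the visual comparison, a compact numeric summary of the three test-set MAEs already computed above (a sketch using the variables defined earlier):

# Sketch: side-by-side MAE of the three models on the same test split.
print(f"CNN           : {mean_absolute_error(y_test, y_pred):.3f}")
print(f"Random Forest : {mean_absolute_error(y_test, output_points):.3f}")
print(f"XGBoost       : {mean_absolute_error(y_test, y_pred_xgb):.3f}")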

[ ]:
