
Brain Tumor Segmentation

The document outlines a Jupyter notebook for brain tumor segmentation using a U-Net model, achieving a Dice coefficient of 89.6%. It includes code for data handling, model creation, training, and evaluation, utilizing libraries such as TensorFlow, Keras, and OpenCV. Key functions include data augmentation, model training history visualization, and performance metrics calculation.


brain-tumor-segmentation-unet-dice-coef-89-6.ipynb - Colab https://colab.research.google.com/drive/1VS6XxwilS40jo7f7nZ3y2...

# import system libs


import os
import time
import random
import pathlib
import itertools
from glob import glob
from tqdm import tqdm_notebook, tnrange

# import data handling tools


import cv2
import numpy as np
import pandas as pd
import seaborn as sns
sns.set_style('darkgrid')
import matplotlib.pyplot as plt
%matplotlib inline
from skimage.color import rgb2gray
from skimage.morphology import label
from skimage.transform import resize
from sklearn.model_selection import train_test_split
from skimage.io import imread, imshow, concatenate_images

# import Deep learning Libraries


import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K
from tensorflow.keras.models import Model, load_model, save_model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.optimizers import Adam, Adamax
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.layers import Input, Activation, BatchNormalization, Dropout, Lambda, Conv2D, Conv2DTranspose, MaxPooling2D, concatenate

# Ignore Warnings
import warnings
warnings.filterwarnings("ignore")

print ('modules loaded')

modules loaded

 Create needed functions

 Function to create dataframe from dataset

# function to create dataframe


def create_df(data_dir):
    images_paths = []
    masks_paths = glob(f'{data_dir}/*/*_mask*')

    for i in masks_paths:
        images_paths.append(i.replace('_mask', ''))

    df = pd.DataFrame(data={'images_paths': images_paths, 'masks_paths': masks_paths})

    return df

# Function to split dataframe into train, valid, test


def split_df(df):
    # create train_df
    train_df, dummy_df = train_test_split(df, train_size=0.8)

    # create valid_df and test_df
    valid_df, test_df = train_test_split(dummy_df, train_size=0.5)

    return train_df, valid_df, test_df

 Function to create image generators and augmentation

def create_gens(df, aug_dict):
    img_size = (256, 256)
    batch_size = 40

    img_gen = ImageDataGenerator(**aug_dict)
    msk_gen = ImageDataGenerator(**aug_dict)

    # Create image and mask generators with a shared seed so augmentations stay aligned
    image_gen = img_gen.flow_from_dataframe(df, x_col='images_paths', class_mode=None, color_mode='rgb',
                                            target_size=img_size, batch_size=batch_size,
                                            save_to_dir=None, save_prefix='image', seed=1)

    mask_gen = msk_gen.flow_from_dataframe(df, x_col='masks_paths', class_mode=None, color_mode='grayscale',
                                           target_size=img_size, batch_size=batch_size,
                                           save_to_dir=None, save_prefix='mask', seed=1)

    gen = zip(image_gen, mask_gen)

    for (img, msk) in gen:
        # rescale images to [0, 1] and binarize the masks
        img = img / 255
        msk = msk / 255
        msk[msk > 0.5] = 1
        msk[msk <= 0.5] = 0

        yield (img, msk)

 Function that builds the U-Net structure

def unet(input_size=(256, 256, 3)):
    inputs = Input(input_size)

    # First DownConvolution / Encoder leg begins, so start with Conv2D
    conv1 = Conv2D(filters=64, kernel_size=(3, 3), padding="same")(inputs)
    bn1 = Activation("relu")(conv1)
    conv1 = Conv2D(filters=64, kernel_size=(3, 3), padding="same")(bn1)
    bn1 = BatchNormalization(axis=3)(conv1)
    bn1 = Activation("relu")(bn1)
    pool1 = MaxPooling2D(pool_size=(2, 2))(bn1)

    conv2 = Conv2D(filters=128, kernel_size=(3, 3), padding="same")(pool1)
    bn2 = Activation("relu")(conv2)
    conv2 = Conv2D(filters=128, kernel_size=(3, 3), padding="same")(bn2)
    bn2 = BatchNormalization(axis=3)(conv2)
    bn2 = Activation("relu")(bn2)
    pool2 = MaxPooling2D(pool_size=(2, 2))(bn2)

    conv3 = Conv2D(filters=256, kernel_size=(3, 3), padding="same")(pool2)
    bn3 = Activation("relu")(conv3)
    conv3 = Conv2D(filters=256, kernel_size=(3, 3), padding="same")(bn3)
    bn3 = BatchNormalization(axis=3)(conv3)
    bn3 = Activation("relu")(bn3)
    pool3 = MaxPooling2D(pool_size=(2, 2))(bn3)

    conv4 = Conv2D(filters=512, kernel_size=(3, 3), padding="same")(pool3)
    bn4 = Activation("relu")(conv4)
    conv4 = Conv2D(filters=512, kernel_size=(3, 3), padding="same")(bn4)
    bn4 = BatchNormalization(axis=3)(conv4)
    bn4 = Activation("relu")(bn4)
    pool4 = MaxPooling2D(pool_size=(2, 2))(bn4)

    conv5 = Conv2D(filters=1024, kernel_size=(3, 3), padding="same")(pool4)
    bn5 = Activation("relu")(conv5)
    conv5 = Conv2D(filters=1024, kernel_size=(3, 3), padding="same")(bn5)
    bn5 = BatchNormalization(axis=3)(conv5)
    bn5 = Activation("relu")(bn5)

    # Now UpConvolution / Decoder leg begins, so start with Conv2DTranspose.
    # The skip connections concatenate the encoder feature maps with the corresponding decoder feature maps.
    # After every concatenation we again apply two consecutive regular convolutions so that the
    # model can learn to assemble a more precise output.

    up6 = concatenate([Conv2DTranspose(512, kernel_size=(2, 2), strides=(2, 2), padding="same")(bn5), conv4], axis=3)
    conv6 = Conv2D(filters=512, kernel_size=(3, 3), padding="same")(up6)
    bn6 = Activation("relu")(conv6)
    conv6 = Conv2D(filters=512, kernel_size=(3, 3), padding="same")(bn6)
    bn6 = BatchNormalization(axis=3)(conv6)
    bn6 = Activation("relu")(bn6)

    up7 = concatenate([Conv2DTranspose(256, kernel_size=(2, 2), strides=(2, 2), padding="same")(bn6), conv3], axis=3)
    conv7 = Conv2D(filters=256, kernel_size=(3, 3), padding="same")(up7)
    bn7 = Activation("relu")(conv7)
    conv7 = Conv2D(filters=256, kernel_size=(3, 3), padding="same")(bn7)
    bn7 = BatchNormalization(axis=3)(conv7)
    bn7 = Activation("relu")(bn7)

    up8 = concatenate([Conv2DTranspose(128, kernel_size=(2, 2), strides=(2, 2), padding="same")(bn7), conv2], axis=3)
    conv8 = Conv2D(filters=128, kernel_size=(3, 3), padding="same")(up8)
    bn8 = Activation("relu")(conv8)
    conv8 = Conv2D(filters=128, kernel_size=(3, 3), padding="same")(bn8)
    bn8 = BatchNormalization(axis=3)(conv8)
    bn8 = Activation("relu")(bn8)

    up9 = concatenate([Conv2DTranspose(64, kernel_size=(2, 2), strides=(2, 2), padding="same")(bn8), conv1], axis=3)
    conv9 = Conv2D(filters=64, kernel_size=(3, 3), padding="same")(up9)
    bn9 = Activation("relu")(conv9)
    conv9 = Conv2D(filters=64, kernel_size=(3, 3), padding="same")(bn9)
    bn9 = BatchNormalization(axis=3)(conv9)
    bn9 = Activation("relu")(bn9)

    conv10 = Conv2D(filters=1, kernel_size=(1, 1), activation="sigmoid")(bn9)

    return Model(inputs=[inputs], outputs=[conv10])
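
A quick structural check, not part of the original notebook, just to confirm that the decoder restores the input resolution:

# illustrative only: the network maps a (256, 256, 3) input to a (256, 256, 1) probability mask
m = unet()
print(m.output_shape)   # expected: (None, 256, 256, 1)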

 Functions for coefficients and loss

# function to create dice coefficient


def dice_coef(y_true, y_pred, smooth=100):
    y_true_flatten = K.flatten(y_true)
    y_pred_flatten = K.flatten(y_pred)

    intersection = K.sum(y_true_flatten * y_pred_flatten)
    union = K.sum(y_true_flatten) + K.sum(y_pred_flatten)
    return (2 * intersection + smooth) / (union + smooth)

# function to create dice loss


def dice_loss(y_true, y_pred, smooth=100):
    return -dice_coef(y_true, y_pred, smooth)

# function to create iou coefficient


def iou_coef(y_true, y_pred, smooth=100):
    intersection = K.sum(y_true * y_pred)
    total = K.sum(y_true + y_pred)
    iou = (intersection + smooth) / (total - intersection + smooth)
    return iou
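
For reference, with $y$ the flattened ground-truth mask, $\hat{y}$ the flattened prediction and $s$ the smoothing constant (100 here), the functions above compute

$$\mathrm{Dice}(y,\hat{y}) = \frac{2\sum_i y_i\hat{y}_i + s}{\sum_i y_i + \sum_i \hat{y}_i + s}, \qquad \mathrm{IoU}(y,\hat{y}) = \frac{\sum_i y_i\hat{y}_i + s}{\sum_i y_i + \sum_i \hat{y}_i - \sum_i y_i\hat{y}_i + s},$$

and dice_loss is simply $-\mathrm{Dice}$, which is why the loss values in the training log below are negative.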

 Function to show image samples

def show_images(images, masks):
    plt.figure(figsize=(12, 12))

    for i in range(25):
        plt.subplot(5, 5, i + 1)
        img_path = images[i]
        mask_path = masks[i]

        # read image and convert it to RGB scale
        image = cv2.imread(img_path)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)

        # read mask
        mask = cv2.imread(mask_path)

        # show image with mask overlay
        plt.imshow(image)
        plt.imshow(mask, alpha=0.4)
        plt.axis('off')

    plt.tight_layout()
    plt.show()

 Function to display training history

def plot_training(hist):
    '''
    This function takes the training history and plots the accuracy, IoU, Dice and loss curves,
    marking the best epoch for each.
    '''

    # Define needed variables
    tr_acc = hist.history['accuracy']
    tr_iou = hist.history['iou_coef']
    tr_dice = hist.history['dice_coef']
    tr_loss = hist.history['loss']

    val_acc = hist.history['val_accuracy']
    val_iou = hist.history['val_iou_coef']
    val_dice = hist.history['val_dice_coef']
    val_loss = hist.history['val_loss']

    index_acc = np.argmax(val_acc)
    acc_highest = val_acc[index_acc]
    index_iou = np.argmax(val_iou)
    iou_highest = val_iou[index_iou]
    index_dice = np.argmax(val_dice)
    dice_highest = val_dice[index_dice]
    index_loss = np.argmin(val_loss)
    val_lowest = val_loss[index_loss]

    Epochs = [i + 1 for i in range(len(tr_acc))]

    acc_label = f'best epoch= {str(index_acc + 1)}'
    iou_label = f'best epoch= {str(index_iou + 1)}'
    dice_label = f'best epoch= {str(index_dice + 1)}'
    loss_label = f'best epoch= {str(index_loss + 1)}'

    # Plot training history
    plt.figure(figsize=(20, 20))
    plt.style.use('fivethirtyeight')

    # Training Accuracy
    plt.subplot(2, 2, 1)
    plt.plot(Epochs, tr_acc, 'r', label='Training Accuracy')
    plt.plot(Epochs, val_acc, 'g', label='Validation Accuracy')
    plt.scatter(index_acc + 1, acc_highest, s=150, c='blue', label=acc_label)
    plt.title('Training and Validation Accuracy')
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.legend()

    # Training IoU
    plt.subplot(2, 2, 2)
    plt.plot(Epochs, tr_iou, 'r', label='Training IoU')
    plt.plot(Epochs, val_iou, 'g', label='Validation IoU')
    plt.scatter(index_iou + 1, iou_highest, s=150, c='blue', label=iou_label)
    plt.title('Training and Validation IoU Coefficient')
    plt.xlabel('Epochs')
    plt.ylabel('IoU')
    plt.legend()

    # Training Dice
    plt.subplot(2, 2, 3)
    plt.plot(Epochs, tr_dice, 'r', label='Training Dice')
    plt.plot(Epochs, val_dice, 'g', label='Validation Dice')
    plt.scatter(index_dice + 1, dice_highest, s=150, c='blue', label=dice_label)
    plt.title('Training and Validation Dice Coefficient')
    plt.xlabel('Epochs')
    plt.ylabel('Dice')
    plt.legend()

    # Training Loss
    plt.subplot(2, 2, 4)
    plt.plot(Epochs, tr_loss, 'r', label='Training loss')
    plt.plot(Epochs, val_loss, 'g', label='Validation loss')
    plt.scatter(index_loss + 1, val_lowest, s=150, c='blue', label=loss_label)
    plt.title('Training and Validation Loss')
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.legend()

    plt.tight_layout()
    plt.show()

 Model Structure

 Start reading data

data_dir = './archive/lgg-mri-segmentation/kaggle_3m'

df = create_df(data_dir)
train_df, valid_df, test_df = split_df(df)
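
A quick sanity check (not in the original notebook): the 80/10/10 split produced by split_df can be printed and compared against the "Found ... validated image filenames" messages in the training log further down.

print(f'train: {len(train_df)}  valid: {len(valid_df)}  test: {len(test_df)}')   # e.g. 3143 / 393 / 393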


tr_aug_dict = dict(rotation_range=0.2,
width_shift_range=0.05,
height_shift_range=0.05,
shear_range=0.05,
zoom_range=0.05,
horizontal_flip=True,
fill_mode='nearest')

train_gen = create_gens(train_df, aug_dict=tr_aug_dict)


valid_gen = create_gens(valid_df, aug_dict={})
test_gen = create_gens(test_df, aug_dict={})
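
Pulling a single batch is a cheap way to confirm what create_gens yields (illustrative only; it also triggers the generator's "Found ... validated image filenames" messages and does not affect later training).

imgs, msks = next(train_gen)
print(imgs.shape, msks.shape)   # expected: (40, 256, 256, 3) and (40, 256, 256, 1)
print(np.unique(msks))          # masks are binarized, typically [0. 1.]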

show_images(list(train_df['images_paths']), list(train_df['masks_paths']))


 Unet Model

model = unet()
model.compile(Adamax(learning_rate=0.001), loss=dice_loss, metrics=['accuracy', iou_coef, dice_coef])

model.summary()


 Model training

epochs = 120
batch_size = 40
callbacks = [ModelCheckpoint('unet.hdf5', verbose=0, save_best_only=True)]

history = model.fit(train_gen,
steps_per_epoch=len(train_df) / batch_size,
epochs=epochs,
verbose=1,
callbacks=callbacks,
validation_data = valid_gen,
validation_steps=len(valid_df) / batch_size)

Found 3143 validated image filenames.


Found 3143 validated image filenames.



Epoch 1/120
79/78 [==============================] - ETA: 0s - loss: -0.1287 - accuracy: 0.9356 - iou
Found 393 validated image filenames.
78/78 [==============================] - 182s 2s/step - loss: -0.1287 - accuracy: 0.9356
Epoch 2/120
78/78 [==============================] - 121s 2s/step - loss: -0.3003 - accuracy: 0.9869
Epoch 3/120
78/78 [==============================] - 121s 2s/step - loss: -0.5347 - accuracy: 0.9930
Epoch 4/120
78/78 [==============================] - 118s 2s/step - loss: -0.6374 - accuracy: 0.9939
Epoch 5/120
78/78 [==============================] - 119s 2s/step - loss: -0.6806 - accuracy: 0.9945
Epoch 6/120
78/78 [==============================] - 120s 2s/step - loss: -0.7032 - accuracy: 0.9946
Epoch 7/120
78/78 [==============================] - 118s 2s/step - loss: -0.7266 - accuracy: 0.9950
Epoch 8/120
78/78 [==============================] - 118s 2s/step - loss: -0.7169 - accuracy: 0.9947
Epoch 9/120
78/78 [==============================] - 120s 2s/step - loss: -0.7505 - accuracy: 0.9954
Epoch 10/120
78/78 [==============================] - 119s 2s/step - loss: -0.7599 - accuracy: 0.9955
Epoch 11/120
78/78 [==============================] - 119s 2s/step - loss: -0.7573 - accuracy: 0.9955
Epoch 12/120
78/78 [==============================] - 117s 1s/step - loss: -0.7754 - accuracy: 0.9957
Epoch 13/120
78/78 [==============================] - 119s 2s/step - loss: -0.7859 - accuracy: 0.9959
Epoch 14/120
78/78 [==============================] - 119s 2s/step - loss: -0.7812 - accuracy: 0.9959
Epoch 15/120
78/78 [==============================] - 120s 2s/step - loss: -0.7933 - accuracy: 0.9962
Epoch 16/120
78/78 [==============================] - 117s 1s/step - loss: -0.7888 - accuracy: 0.9959
Epoch 17/120
78/78 [==============================] - 119s 2s/step - loss: -0.8096 - accuracy: 0.9963
Epoch 18/120
78/78 [==============================] - 118s 2s/step - loss: -0.7933 - accuracy: 0.9962
Epoch 19/120
78/78 [==============================] - 119s 2s/step - loss: -0.8061 - accuracy: 0.9963
Epoch 20/120
78/78 [==============================] - 120s 2s/step - loss: -0.8240 - accuracy: 0.9966
Epoch 21/120
78/78 [==============================] - 119s 2s/step - loss: -0.8322 - accuracy: 0.9967
Epoch 22/120
78/78 [==============================] - 117s 1s/step - loss: -0.8326 - accuracy: 0.9967
Epoch 23/120
78/78 [==============================] - 119s 2s/step - loss: -0.8232 - accuracy: 0.9967
Epoch 24/120
78/78 [==============================] - 118s 2s/step - loss: -0.8377 - accuracy: 0.9969
Epoch 25/120
78/78 [==============================] - 119s 2s/step - loss: -0.8374 - accuracy: 0.9969
Epoch 26/120
78/78 [==============================] - 118s 2s/step - loss: -0.8375 - accuracy: 0.9969
Epoch 27/120
78/78 [==============================] - 118s 2s/step - loss: -0.8464 - accuracy: 0.9971
Epoch 28/120
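
EarlyStopping is imported at the top of the notebook but never added to the callbacks list; a possible way to combine it with the existing checkpoint (illustrative only, monitoring val_dice_coef in 'max' mode with an assumed patience of 15) would be:

# illustrative variation: stop training once validation Dice stops improving
callbacks = [ModelCheckpoint('unet.hdf5', verbose=0, save_best_only=True),
             EarlyStopping(monitor='val_dice_coef', mode='max', patience=15, restore_best_weights=True)]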
plot_training(history)


 Model Evaluation

ts_length = len(test_df)
test_batch_size = max(sorted([ts_length // n for n in range(1, ts_length + 1) if ts_length % n == 0 and ts_length / n <= 80]))
test_steps = ts_length // test_batch_size
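
Worked through for this run: with 393 test files the divisors of 393 are 1, 3, 131 and 393; the largest batch size that stays at or below the cap of 80 is 3, so test_steps = 393 // 3 = 131, which matches the 131/131 progress bars in the evaluation output below.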

train_score = model.evaluate(train_gen, steps= test_steps, verbose= 1)


valid_score = model.evaluate(valid_gen, steps= test_steps, verbose= 1)
test_score = model.evaluate(test_gen, steps= test_steps, verbose= 1)

print("Train Loss: ", train_score[0])


print("Train Accuracy: ", train_score[1])
print("Train IoU: ", train_score[2])
print("Train Dice: ", train_score[3])
print('-' * 20)

print("Valid Loss: ", valid_score[0])


print("Valid Accuracy: ", valid_score[1])
print("Valid IoU: ", valid_score[2])
print("Valid Dice: ", valid_score[3])
print('-' * 20)


print("Test Loss: ", test_score[0])


print("Test Accuracy: ", test_score[1])
print("Test IoU: ", test_score[2])
print("Test Dice: ", test_score[3])

131/131 [==============================] - 124s 954ms/step - loss: -0.9099 - accuracy: 0.


131/131 [==============================] - 43s 325ms/step - loss: -0.8535 - accuracy: 0.9
Found 393 validated image filenames.
Found 393 validated image filenames.
131/131 [==============================] - 44s 334ms/step - loss: -0.8971 - accuracy: 0.9
Train Loss: -0.9098997116088867
Train Accuracy: 0.9982170462608337
Train IoU: 0.8367183804512024
Train Dice: 0.9099515080451965
--------------------
Valid Loss: -0.8535401821136475
Valid Accuracy: 0.9973485469818115
Valid IoU: 0.7472280859947205
Valid Dice: 0.8531280755996704
--------------------
Test Loss: -0.8971260786056519
Test Accuracy: 0.9978862404823303
Test IoU: 0.815096378326416
Test Dice: 0.8968743681907654

 Prediction

for _ in range(20):
    index = np.random.randint(1, len(test_df.index))
    img = cv2.imread(test_df['images_paths'].iloc[index])
    img = cv2.resize(img, (256, 256))
    img = img / 255
    img = img[np.newaxis, :, :, :]

    predicted_img = model.predict(img)

    plt.figure(figsize=(12, 12))

    plt.subplot(1, 3, 1)
    plt.imshow(np.squeeze(img))
    plt.axis('off')
    plt.title('Original Image')

    plt.subplot(1, 3, 2)
    plt.imshow(np.squeeze(cv2.imread(test_df['masks_paths'].iloc[index])))
    plt.axis('off')
    plt.title('Original Mask')

    plt.subplot(1, 3, 3)
    plt.imshow(np.squeeze(predicted_img) > 0.5)
    plt.title('Prediction')
    plt.axis('off')

    plt.show()
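
As a rough spot-check (illustrative only; it reuses the index and predicted_img left over from the last loop iteration), the Dice score of one thresholded prediction against its ground-truth mask can be computed with the same dice_coef defined earlier:

mask = cv2.imread(test_df['masks_paths'].iloc[index], cv2.IMREAD_GRAYSCALE)
mask = cv2.resize(mask, (256, 256)) / 255
mask = (mask > 0.5).astype('float32')          # binarize like the training masks
pred = (np.squeeze(predicted_img) > 0.5).astype('float32')
print('Dice:', float(dice_coef(mask, pred)))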
