import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import zipfile
import cv2
from skimage import io
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras import layers, optimizers
from tensorflow.keras.layers import *
from tensorflow.keras.models import Model
from tensorflow.keras.initializers import glorot_uniform
from tensorflow.keras.utils import plot_model
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, LearningRateScheduler
import tensorflow.keras.backend as K
import random
import glob
from sklearn.preprocessing import StandardScaler, normalize
from IPython.display import display
data = pd.read_csv('brain_dataset/lgg-mri-segmentation/kaggle_3m/data.csv')
data.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 110 entries, 0 to 109
Data columns (total 18 columns):
 #   Column                     Non-Null Count  Dtype  
---  ------                     --------------  -----  
 0   Patient                    110 non-null    object 
 1   RNASeqCluster              92 non-null     float64
 2   MethylationCluster         109 non-null    float64
 3   miRNACluster               110 non-null    int64  
 4   CNCluster                  108 non-null    float64
 5   RPPACluster                98 non-null     float64
 6   OncosignCluster            105 non-null    float64
 7   COCCluster                 110 non-null    int64  
 8   histological_type          109 non-null    float64
 9   neoplasm_histologic_grade  109 non-null    float64
 10  tumor_tissue_site          109 non-null    float64
 11  laterality                 109 non-null    float64
 12  tumor_location             109 non-null    float64
 13  gender                     109 non-null    float64
 14  age_at_initial_pathologic  109 non-null    float64
 15  race                       108 non-null    float64
 16  ethnicity                  102 non-null    float64
 17  death01                    109 non-null    float64
dtypes: float64(15), int64(2), object(1)
memory usage: 15.6+ KB
data.head(10)
| | Patient | RNASeqCluster | MethylationCluster | miRNACluster | CNCluster | RPPACluster | OncosignCluster | COCCluster | histological_type | neoplasm_histologic_grade | tumor_tissue_site | laterality | tumor_location | gender | age_at_initial_pathologic | race | ethnicity | death01 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | TCGA_CS_4941 | 2.0 | 4.0 | 2 | 2.0 | NaN | 3.0 | 2 | 1.0 | 2.0 | 1.0 | 3.0 | 2.0 | 2.0 | 67.0 | 3.0 | 2.0 | 1.0 | 
| 1 | TCGA_CS_4942 | 1.0 | 5.0 | 2 | 1.0 | 1.0 | 2.0 | 1 | 1.0 | 2.0 | 1.0 | 3.0 | 2.0 | 1.0 | 44.0 | 2.0 | NaN | 1.0 | 
| 2 | TCGA_CS_4943 | 1.0 | 5.0 | 2 | 1.0 | 2.0 | 2.0 | 1 | 1.0 | 2.0 | 1.0 | 1.0 | 2.0 | 2.0 | 37.0 | 3.0 | NaN | 0.0 | 
| 3 | TCGA_CS_4944 | NaN | 5.0 | 2 | 1.0 | 2.0 | 1.0 | 1 | 1.0 | 1.0 | 1.0 | 3.0 | 6.0 | 2.0 | 50.0 | 3.0 | NaN | 0.0 | 
| 4 | TCGA_CS_5393 | 4.0 | 5.0 | 2 | 1.0 | 2.0 | 3.0 | 1 | 1.0 | 2.0 | 1.0 | 1.0 | 6.0 | 2.0 | 39.0 | 3.0 | NaN | 0.0 | 
| 5 | TCGA_CS_5395 | 2.0 | 4.0 | 2 | 2.0 | NaN | 3.0 | 2 | 3.0 | 1.0 | 1.0 | 3.0 | 5.0 | 2.0 | 43.0 | 2.0 | NaN | 1.0 | 
| 6 | TCGA_CS_5396 | 3.0 | 3.0 | 2 | 3.0 | 2.0 | 2.0 | 3 | 3.0 | 2.0 | 1.0 | 3.0 | 2.0 | 1.0 | 53.0 | 3.0 | 2.0 | 0.0 | 
| 7 | TCGA_CS_5397 | NaN | 4.0 | 1 | 2.0 | 3.0 | 3.0 | 2 | 1.0 | 2.0 | 1.0 | 1.0 | 6.0 | 1.0 | 54.0 | 3.0 | 2.0 | 1.0 | 
| 8 | TCGA_CS_6186 | 2.0 | 4.0 | 1 | 2.0 | 1.0 | 3.0 | 2 | 2.0 | 2.0 | 1.0 | 3.0 | 2.0 | 2.0 | 58.0 | 3.0 | 2.0 | 1.0 | 
| 9 | TCGA_CS_6188 | 2.0 | 4.0 | 3 | 2.0 | 3.0 | 3.0 | 2 | 1.0 | 2.0 | 1.0 | 3.0 | 6.0 | 2.0 | 48.0 | 3.0 | 2.0 | 0.0 | 
data_map = []
for sub_dir_path in glob.glob("brain_dataset/lgg-mri-segmentation/kaggle_3m/*"):
    # Each patient folder holds the .tif slices; the loose data.csv and README.md
    # files raise "Not a directory" and are simply reported and skipped.
    try:
        dir_name = sub_dir_path.split('/')[-1]
        for filename in os.listdir(sub_dir_path):
            image_path = sub_dir_path + '/' + filename
            data_map.extend([dir_name, image_path])
    except Exception as e:
        print(e)
[Errno 20] Not a directory: 'brain_dataset/lgg-mri-segmentation/kaggle_3m/data.csv'
[Errno 20] Not a directory: 'brain_dataset/lgg-mri-segmentation/kaggle_3m/README.md'
# data_map alternates patient_id, path, patient_id, path, ... so split it into two columns
df = pd.DataFrame({"patient_id" : data_map[::2],
                   "path" : data_map[1::2]})
df.head()
| | patient_id | path |
|---|---|---|
| 0 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 
| 1 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 
| 2 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 
| 3 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 
| 4 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 
# Separate the mask paths from the MRI image paths
df_imgs = df[~df['path'].str.contains("mask")]
df_masks = df[df['path'].str.contains("mask")]
# Character offsets used to slice the slice number out of each file path for sorting.
# They assume the dataset sits at exactly this base path and that every patient folder
# name has the same length; a regex-based alternative is sketched after the check below.
BASE_LEN = 89       # length of the path prefix up to the slice number
END_IMG_LEN = 4     # length of ".tif"
END_MASK_LEN = 9    # length of "_mask.tif"
# Data sorting
imgs = sorted(df_imgs["path"].values, key=lambda x : int(x[BASE_LEN:-END_IMG_LEN]))
masks = sorted(df_masks["path"].values, key=lambda x : int(x[BASE_LEN:-END_MASK_LEN]))
# Sorting check
idx = random.randint(0, len(imgs)-1)
print("Path to the Image:", imgs[idx], "\nPath to the Mask:", masks[idx])
Path to the Image: brain_dataset/lgg-mri-segmentation/kaggle_3m/TCGA_DU_A5TY_19970709/TCGA_DU_A5TY_19970709_36.tif 
Path to the Mask: brain_dataset/lgg-mri-segmentation/kaggle_3m/TCGA_DU_A5TY_19970709/TCGA_DU_A5TY_19970709_36_mask.tif
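The fixed character offsets only hold while the dataset lives at exactly this path and all patient folder names share one length. A sketch of an alternative that parses the slice number straight out of the filename with a regular expression (the helper name slice_number is an assumption, not from the original; it produces the same ordering as the slicing above):
import re

def slice_number(path):
    # Pull the trailing slice index out of e.g. "..._36.tif" or "..._36_mask.tif"
    return int(re.search(r'_(\d+)(?:_mask)?\.tif$', path).group(1))

imgs = sorted(df_imgs["path"].values, key=slice_number)
masks = sorted(df_masks["path"].values, key=slice_number)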
# Final dataframe
brain_df = pd.DataFrame({"patient_id": df_imgs.patient_id.values,
                         "image_path": imgs,
                         "mask_path": masks
                        })
def pos_neg_diagnosis(mask_path):
    # Label a slice as positive (1) if its mask contains any tumour pixels, else negative (0)
    value = np.max(cv2.imread(mask_path))
    return 1 if value > 0 else 0

brain_df['mask'] = brain_df['mask_path'].apply(pos_neg_diagnosis)
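Since each mask is effectively binary, it could also be decoded as a single grayscale channel instead of three identical ones; a small alternative sketch (the name pos_neg_diagnosis_gray is hypothetical):
def pos_neg_diagnosis_gray(mask_path):
    # Same positive/negative label, but keep only one channel of the binary mask in memory
    mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)
    return 1 if mask.max() > 0 else 0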
brain_df
| | patient_id | image_path | mask_path | mask |
|---|---|---|---|---|
| 0 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 1 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 2 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 3 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 4 | TCGA_CS_6667_20011105 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| ... | ... | ... | ... | ... | 
| 3924 | TCGA_FG_A60K_20040224 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 3925 | TCGA_FG_A60K_20040224 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 3926 | TCGA_FG_A60K_20040224 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 3927 | TCGA_FG_A60K_20040224 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
| 3928 | TCGA_FG_A60K_20040224 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | 
3929 rows × 4 columns
brain_df['mask'].value_counts()
0    2556
1    1373
Name: mask, dtype: int64
import plotly.graph_objects as go  # using plotly to create interactive plots
fig = go.Figure([go.Bar(x=brain_df['mask'].value_counts().index, 
                        y=brain_df['mask'].value_counts(), 
                        width=[.4, .4]
                       )
                ])
fig.update_traces(marker_color='rgb(158,202,225)', marker_line_color='rgb(8,48,107)',
                  marker_line_width=4, opacity=0.4
                 )
fig.update_layout(title_text="Mask Count Plot",
                  width=700,
                  height=550,
                  yaxis=dict(
                             title_text="Count",
                             tickmode="array",
                             titlefont=dict(size=20)
                           )
                 )
fig.update_yaxes(automargin=True)
fig.show()
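Seaborn, imported at the top but otherwise unused here, could draw the same class-count chart as a quick static plot; an equivalent sketch, not part of the original cell:
sns.countplot(x=brain_df['mask'])  # bar per class: 0 = no tumour, 1 = tumour
plt.title("Mask Count Plot")
plt.show()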
# Find the first slice whose mask actually contains tumour pixels
for i in range(len(brain_df)):
    if cv2.imread(brain_df.mask_path[i]).max() > 0:
        break
plt.figure(figsize=(8,8))
plt.subplot(1,2,1)
plt.imshow(cv2.imread(brain_df.mask_path[i]));
plt.title('Tumor Location')
plt.subplot(1,2,2)
plt.imshow(cv2.imread(brain_df.image_path[i]));
cv2.imread(brain_df.mask_path[i]).max(), cv2.imread(brain_df.mask_path[i]).min()
(255, 0)
# Basic visualizations: show random MRI slices and their masks side by side
fig, axs = plt.subplots(6, 2, figsize=(16, 26))
count = 0
for x in range(6):
    i = random.randint(0, len(brain_df) - 1)  # select a random index
    axs[count][0].title.set_text("Brain MRI")  # set title
    axs[count][0].imshow(cv2.imread(brain_df.image_path[i]))  # show MRI
    axs[count][1].title.set_text("Mask - " + str(brain_df['mask'][i]))  # mask label (0 or 1)
    axs[count][1].imshow(cv2.imread(brain_df.mask_path[i]))  # show corresponding mask
    count += 1
fig.tight_layout()
count = 0
i = 0
fig,axs = plt.subplots(12,3, figsize=(20,50))
for mask in brain_df['mask']:
    if (mask==1):
        img = io.imread(brain_df.image_path[i])
        axs[count][0].title.set_text("Brain MRI")
        axs[count][0].imshow(img)
        
        mask = io.imread(brain_df.mask_path[i])
        axs[count][1].title.set_text("Mask")
        axs[count][1].imshow(mask, cmap='gray')
        
        img[mask==255] = (0,255,150)  # change pixel color at the position of mask
        axs[count][2].title.set_text("MRI with Mask")
        axs[count][2].imshow(img)
        count +=1
    i += 1
    if (count==12):
        break
        
fig.tight_layout()
brain_df_train = brain_df.drop(columns=['patient_id'])
# Convert the data in mask column to string format, to use categorical mode in flow_from_dataframe
brain_df_train['mask'] = brain_df_train['mask'].apply(lambda x: str(x))
brain_df_train.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 3929 entries, 0 to 3928
Data columns (total 3 columns):
 #   Column      Non-Null Count  Dtype 
---  ------      --------------  ------
 0   image_path  3929 non-null   object
 1   mask_path   3929 non-null   object
 2   mask        3929 non-null   object
dtypes: object(3)
memory usage: 92.2+ KB
from sklearn.model_selection import train_test_split
train, test = train_test_split(brain_df_train, test_size=0.15)
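This split is purely random; given the 2556/1373 class imbalance seen above, a stratified variant is one possible refinement (an alternative sketch, not what the notebook does; the random_state value is arbitrary):
train, test = train_test_split(brain_df_train,
                               test_size=0.15,
                               stratify=brain_df_train['mask'],  # keep the 0/1 ratio in both splits
                               random_state=42)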
from tensorflow.keras.preprocessing.image import ImageDataGenerator
datagen = ImageDataGenerator(rescale=1./255., validation_split=0.1)
train_generator = datagen.flow_from_dataframe(train,
                                              directory='./',
                                              x_col='image_path',
                                              y_col='mask',
                                              subset='training',
                                              class_mode='categorical',
                                              batch_size=16,
                                              shuffle=True,
                                              target_size=(256,256)
                                             )
valid_generator = datagen.flow_from_dataframe(train,
                                              directory='./',
                                              x_col='image_path',
                                              y_col='mask',
                                              subset='validation',
                                              class_mode='categorical',
                                              batch_size=16,
                                              shuffle=True,
                                              target_size=(256,256)
                                             )
test_datagen = ImageDataGenerator(rescale=1./255.)
test_generator = test_datagen.flow_from_dataframe(test,
                                                  directory='./',
                                                  x_col='image_path',
                                                  y_col='mask',
                                                  class_mode='categorical',
                                                  batch_size=16,
                                                  shuffle=False,
                                                  target_size=(256,256)
                                                 )
Found 3006 validated image filenames belonging to 2 classes.
Found 333 validated image filenames belonging to 2 classes.
Found 590 validated image filenames belonging to 2 classes.
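As a quick sanity check of the input pipeline, one batch can be drawn from the training generator and its shapes inspected (a small verification sketch added here, not an original cell):
sample_imgs, sample_labels = next(train_generator)
print(sample_imgs.shape)    # expected (16, 256, 256, 3): rescaled RGB slices
print(sample_labels.shape)  # expected (16, 2): one-hot labels from class_mode='categorical'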
from tensorflow.keras.applications.resnet50 import ResNet50
clf_model = ResNet50(weights='imagenet', include_top=False, input_tensor=Input(shape=(256,256,3)))
clf_model.summary()
Model: "resnet50"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to                     
==================================================================================================
 input_1 (InputLayer)           [(None, 256, 256, 3  0           []                               
                                )]                                                                
                                                                                                  
 conv1_pad (ZeroPadding2D)      (None, 262, 262, 3)  0           ['input_1[0][0]']                
                                                                                                  
 conv1_conv (Conv2D)            (None, 128, 128, 64  9472        ['conv1_pad[0][0]']              
                                )                                                                 
                                                                                                  
 conv1_bn (BatchNormalization)  (None, 128, 128, 64  256         ['conv1_conv[0][0]']             
                                )                                                                 
                                                                                                  
 conv1_relu (Activation)        (None, 128, 128, 64  0           ['conv1_bn[0][0]']               
                                )                                                                 
                                                                                                  
 pool1_pad (ZeroPadding2D)      (None, 130, 130, 64  0           ['conv1_relu[0][0]']             
                                )                                                                 
                                                                                                  
 pool1_pool (MaxPooling2D)      (None, 64, 64, 64)   0           ['pool1_pad[0][0]']              
                                                                                                  
 conv2_block1_1_conv (Conv2D)   (None, 64, 64, 64)   4160        ['pool1_pool[0][0]']             
                                                                                                  
 conv2_block1_1_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_1_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block1_2_conv (Conv2D)   (None, 64, 64, 64)   36928       ['conv2_block1_1_relu[0][0]']    
                                                                                                  
 conv2_block1_2_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_2_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block1_0_conv (Conv2D)   (None, 64, 64, 256)  16640       ['pool1_pool[0][0]']             
                                                                                                  
 conv2_block1_3_conv (Conv2D)   (None, 64, 64, 256)  16640       ['conv2_block1_2_relu[0][0]']    
                                                                                                  
 conv2_block1_0_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block1_0_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_3_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block1_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_add (Add)         (None, 64, 64, 256)  0           ['conv2_block1_0_bn[0][0]',      
                                                                  'conv2_block1_3_bn[0][0]']      
                                                                                                  
 conv2_block1_out (Activation)  (None, 64, 64, 256)  0           ['conv2_block1_add[0][0]']       
                                                                                                  
 conv2_block2_1_conv (Conv2D)   (None, 64, 64, 64)   16448       ['conv2_block1_out[0][0]']       
                                                                                                  
 conv2_block2_1_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block2_1_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block2_2_conv (Conv2D)   (None, 64, 64, 64)   36928       ['conv2_block2_1_relu[0][0]']    
                                                                                                  
 conv2_block2_2_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block2_2_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block2_3_conv (Conv2D)   (None, 64, 64, 256)  16640       ['conv2_block2_2_relu[0][0]']    
                                                                                                  
 conv2_block2_3_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block2_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block2_add (Add)         (None, 64, 64, 256)  0           ['conv2_block1_out[0][0]',       
                                                                  'conv2_block2_3_bn[0][0]']      
                                                                                                  
 conv2_block2_out (Activation)  (None, 64, 64, 256)  0           ['conv2_block2_add[0][0]']       
                                                                                                  
 conv2_block3_1_conv (Conv2D)   (None, 64, 64, 64)   16448       ['conv2_block2_out[0][0]']       
                                                                                                  
 conv2_block3_1_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block3_1_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block3_2_conv (Conv2D)   (None, 64, 64, 64)   36928       ['conv2_block3_1_relu[0][0]']    
                                                                                                  
 conv2_block3_2_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block3_2_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block3_3_conv (Conv2D)   (None, 64, 64, 256)  16640       ['conv2_block3_2_relu[0][0]']    
                                                                                                  
 conv2_block3_3_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block3_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block3_add (Add)         (None, 64, 64, 256)  0           ['conv2_block2_out[0][0]',       
                                                                  'conv2_block3_3_bn[0][0]']      
                                                                                                  
 conv2_block3_out (Activation)  (None, 64, 64, 256)  0           ['conv2_block3_add[0][0]']       
                                                                                                  
 conv3_block1_1_conv (Conv2D)   (None, 32, 32, 128)  32896       ['conv2_block3_out[0][0]']       
                                                                                                  
 conv3_block1_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block1_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block1_1_relu[0][0]']    
                                                                                                  
 conv3_block1_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block1_0_conv (Conv2D)   (None, 32, 32, 512)  131584      ['conv2_block3_out[0][0]']       
                                                                                                  
 conv3_block1_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block1_2_relu[0][0]']    
                                                                                                  
 conv3_block1_0_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block1_0_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block1_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_add (Add)         (None, 32, 32, 512)  0           ['conv3_block1_0_bn[0][0]',      
                                                                  'conv3_block1_3_bn[0][0]']      
                                                                                                  
 conv3_block1_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block1_add[0][0]']       
                                                                                                  
 conv3_block2_1_conv (Conv2D)   (None, 32, 32, 128)  65664       ['conv3_block1_out[0][0]']       
                                                                                                  
 conv3_block2_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block2_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block2_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block2_1_relu[0][0]']    
                                                                                                  
 conv3_block2_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block2_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block2_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block2_2_relu[0][0]']    
                                                                                                  
 conv3_block2_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block2_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block2_add (Add)         (None, 32, 32, 512)  0           ['conv3_block1_out[0][0]',       
                                                                  'conv3_block2_3_bn[0][0]']      
                                                                                                  
 conv3_block2_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block2_add[0][0]']       
                                                                                                  
 conv3_block3_1_conv (Conv2D)   (None, 32, 32, 128)  65664       ['conv3_block2_out[0][0]']       
                                                                                                  
 conv3_block3_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block3_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block3_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block3_1_relu[0][0]']    
                                                                                                  
 conv3_block3_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block3_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block3_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block3_2_relu[0][0]']    
                                                                                                  
 conv3_block3_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block3_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block3_add (Add)         (None, 32, 32, 512)  0           ['conv3_block2_out[0][0]',       
                                                                  'conv3_block3_3_bn[0][0]']      
                                                                                                  
 conv3_block3_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block3_add[0][0]']       
                                                                                                  
 conv3_block4_1_conv (Conv2D)   (None, 32, 32, 128)  65664       ['conv3_block3_out[0][0]']       
                                                                                                  
 conv3_block4_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block4_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block4_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block4_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block4_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block4_1_relu[0][0]']    
                                                                                                  
 conv3_block4_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block4_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block4_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block4_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block4_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block4_2_relu[0][0]']    
                                                                                                  
 conv3_block4_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block4_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block4_add (Add)         (None, 32, 32, 512)  0           ['conv3_block3_out[0][0]',       
                                                                  'conv3_block4_3_bn[0][0]']      
                                                                                                  
 conv3_block4_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block4_add[0][0]']       
                                                                                                  
 conv4_block1_1_conv (Conv2D)   (None, 16, 16, 256)  131328      ['conv3_block4_out[0][0]']       
                                                                                                  
 conv4_block1_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block1_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block1_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block1_1_relu[0][0]']    
                                                                                                  
 conv4_block1_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block1_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block1_0_conv (Conv2D)   (None, 16, 16, 1024  525312      ['conv3_block4_out[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block1_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block1_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block1_0_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block1_0_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block1_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block1_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block1_add (Add)         (None, 16, 16, 1024  0           ['conv4_block1_0_bn[0][0]',      
                                )                                 'conv4_block1_3_bn[0][0]']      
                                                                                                  
 conv4_block1_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block1_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block2_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block1_out[0][0]']       
                                                                                                  
 conv4_block2_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block2_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block2_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block2_1_relu[0][0]']    
                                                                                                  
 conv4_block2_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block2_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block2_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block2_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block2_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block2_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block2_add (Add)         (None, 16, 16, 1024  0           ['conv4_block1_out[0][0]',       
                                )                                 'conv4_block2_3_bn[0][0]']      
                                                                                                  
 conv4_block2_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block2_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block3_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block2_out[0][0]']       
                                                                                                  
 conv4_block3_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block3_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block3_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block3_1_relu[0][0]']    
                                                                                                  
 conv4_block3_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block3_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block3_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block3_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block3_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block3_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block3_add (Add)         (None, 16, 16, 1024  0           ['conv4_block2_out[0][0]',       
                                )                                 'conv4_block3_3_bn[0][0]']      
                                                                                                  
 conv4_block3_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block3_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block4_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block3_out[0][0]']       
                                                                                                  
 conv4_block4_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block4_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block4_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block4_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block4_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block4_1_relu[0][0]']    
                                                                                                  
 conv4_block4_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block4_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block4_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block4_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block4_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block4_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block4_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block4_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block4_add (Add)         (None, 16, 16, 1024  0           ['conv4_block3_out[0][0]',       
                                )                                 'conv4_block4_3_bn[0][0]']      
                                                                                                  
 conv4_block4_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block4_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block5_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block4_out[0][0]']       
                                                                                                  
 conv4_block5_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block5_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block5_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block5_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block5_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block5_1_relu[0][0]']    
                                                                                                  
 conv4_block5_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block5_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block5_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block5_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block5_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block5_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block5_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block5_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block5_add (Add)         (None, 16, 16, 1024  0           ['conv4_block4_out[0][0]',       
                                )                                 'conv4_block5_3_bn[0][0]']      
                                                                                                  
 conv4_block5_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block5_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block6_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block5_out[0][0]']       
                                                                                                  
 conv4_block6_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block6_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block6_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block6_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block6_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block6_1_relu[0][0]']    
                                                                                                  
 conv4_block6_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block6_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block6_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block6_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block6_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block6_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block6_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block6_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block6_add (Add)         (None, 16, 16, 1024  0           ['conv4_block5_out[0][0]',       
                                )                                 'conv4_block6_3_bn[0][0]']      
                                                                                                  
 conv4_block6_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block6_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv5_block1_1_conv (Conv2D)   (None, 8, 8, 512)    524800      ['conv4_block6_out[0][0]']       
                                                                                                  
 conv5_block1_1_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_1_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block1_2_conv (Conv2D)   (None, 8, 8, 512)    2359808     ['conv5_block1_1_relu[0][0]']    
                                                                                                  
 conv5_block1_2_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_2_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block1_0_conv (Conv2D)   (None, 8, 8, 2048)   2099200     ['conv4_block6_out[0][0]']       
                                                                                                  
 conv5_block1_3_conv (Conv2D)   (None, 8, 8, 2048)   1050624     ['conv5_block1_2_relu[0][0]']    
                                                                                                  
 conv5_block1_0_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block1_0_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_3_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block1_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_add (Add)         (None, 8, 8, 2048)   0           ['conv5_block1_0_bn[0][0]',      
                                                                  'conv5_block1_3_bn[0][0]']      
                                                                                                  
 conv5_block1_out (Activation)  (None, 8, 8, 2048)   0           ['conv5_block1_add[0][0]']       
                                                                                                  
 conv5_block2_1_conv (Conv2D)   (None, 8, 8, 512)    1049088     ['conv5_block1_out[0][0]']       
                                                                                                  
 conv5_block2_1_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block2_1_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block2_2_conv (Conv2D)   (None, 8, 8, 512)    2359808     ['conv5_block2_1_relu[0][0]']    
                                                                                                  
 conv5_block2_2_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block2_2_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block2_3_conv (Conv2D)   (None, 8, 8, 2048)   1050624     ['conv5_block2_2_relu[0][0]']    
                                                                                                  
 conv5_block2_3_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block2_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block2_add (Add)         (None, 8, 8, 2048)   0           ['conv5_block1_out[0][0]',       
                                                                  'conv5_block2_3_bn[0][0]']      
                                                                                                  
 conv5_block2_out (Activation)  (None, 8, 8, 2048)   0           ['conv5_block2_add[0][0]']       
                                                                                                  
 conv5_block3_1_conv (Conv2D)   (None, 8, 8, 512)    1049088     ['conv5_block2_out[0][0]']       
                                                                                                  
 conv5_block3_1_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block3_1_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block3_2_conv (Conv2D)   (None, 8, 8, 512)    2359808     ['conv5_block3_1_relu[0][0]']    
                                                                                                  
 conv5_block3_2_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block3_2_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block3_3_conv (Conv2D)   (None, 8, 8, 2048)   1050624     ['conv5_block3_2_relu[0][0]']    
                                                                                                  
 conv5_block3_3_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block3_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block3_add (Add)         (None, 8, 8, 2048)   0           ['conv5_block2_out[0][0]',       
                                                                  'conv5_block3_3_bn[0][0]']      
                                                                                                  
 conv5_block3_out (Activation)  (None, 8, 8, 2048)   0           ['conv5_block3_add[0][0]']       
                                                                                                  
==================================================================================================
Total params: 23,587,712
Trainable params: 23,534,592
Non-trainable params: 53,120
__________________________________________________________________________________________________
# An earlier run with the base layers left trainable gave lower accuracy, so the
# ResNet50 base is frozen here and only the new classification head is trained.
for layer in clf_model.layers:
    layer.trainable = False

# Classification head on top of the frozen feature extractor.
head = clf_model.output
head = AveragePooling2D(pool_size=(4, 4))(head)   # 8x8x2048 -> 2x2x2048
head = Flatten(name='Flatten')(head)
head = Dense(256, activation='relu')(head)
head = Dropout(0.3)(head)
head = Dense(256, activation='relu')(head)
head = Dropout(0.3)(head)
head = Dense(2, activation='softmax')(head)       # 2 output classes
model = Model(clf_model.input, head)
model.compile(loss = 'categorical_crossentropy', 
              optimizer='adam', 
              metrics= ["accuracy"]
             )
model.summary()
Model: "model"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to                     
==================================================================================================
 input_1 (InputLayer)           [(None, 256, 256, 3  0           []                               
                                )]                                                                
                                                                                                  
 conv1_pad (ZeroPadding2D)      (None, 262, 262, 3)  0           ['input_1[0][0]']                
                                                                                                  
 conv1_conv (Conv2D)            (None, 128, 128, 64  9472        ['conv1_pad[0][0]']              
                                )                                                                 
                                                                                                  
 conv1_bn (BatchNormalization)  (None, 128, 128, 64  256         ['conv1_conv[0][0]']             
                                )                                                                 
                                                                                                  
 conv1_relu (Activation)        (None, 128, 128, 64  0           ['conv1_bn[0][0]']               
                                )                                                                 
                                                                                                  
 pool1_pad (ZeroPadding2D)      (None, 130, 130, 64  0           ['conv1_relu[0][0]']             
                                )                                                                 
                                                                                                  
 pool1_pool (MaxPooling2D)      (None, 64, 64, 64)   0           ['pool1_pad[0][0]']              
                                                                                                  
 conv2_block1_1_conv (Conv2D)   (None, 64, 64, 64)   4160        ['pool1_pool[0][0]']             
                                                                                                  
 conv2_block1_1_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_1_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block1_2_conv (Conv2D)   (None, 64, 64, 64)   36928       ['conv2_block1_1_relu[0][0]']    
                                                                                                  
 conv2_block1_2_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_2_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block1_0_conv (Conv2D)   (None, 64, 64, 256)  16640       ['pool1_pool[0][0]']             
                                                                                                  
 conv2_block1_3_conv (Conv2D)   (None, 64, 64, 256)  16640       ['conv2_block1_2_relu[0][0]']    
                                                                                                  
 conv2_block1_0_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block1_0_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_3_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block1_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block1_add (Add)         (None, 64, 64, 256)  0           ['conv2_block1_0_bn[0][0]',      
                                                                  'conv2_block1_3_bn[0][0]']      
                                                                                                  
 conv2_block1_out (Activation)  (None, 64, 64, 256)  0           ['conv2_block1_add[0][0]']       
                                                                                                  
 conv2_block2_1_conv (Conv2D)   (None, 64, 64, 64)   16448       ['conv2_block1_out[0][0]']       
                                                                                                  
 conv2_block2_1_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block2_1_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block2_2_conv (Conv2D)   (None, 64, 64, 64)   36928       ['conv2_block2_1_relu[0][0]']    
                                                                                                  
 conv2_block2_2_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block2_2_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block2_3_conv (Conv2D)   (None, 64, 64, 256)  16640       ['conv2_block2_2_relu[0][0]']    
                                                                                                  
 conv2_block2_3_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block2_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block2_add (Add)         (None, 64, 64, 256)  0           ['conv2_block1_out[0][0]',       
                                                                  'conv2_block2_3_bn[0][0]']      
                                                                                                  
 conv2_block2_out (Activation)  (None, 64, 64, 256)  0           ['conv2_block2_add[0][0]']       
                                                                                                  
 conv2_block3_1_conv (Conv2D)   (None, 64, 64, 64)   16448       ['conv2_block2_out[0][0]']       
                                                                                                  
 conv2_block3_1_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block3_1_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block3_2_conv (Conv2D)   (None, 64, 64, 64)   36928       ['conv2_block3_1_relu[0][0]']    
                                                                                                  
 conv2_block3_2_bn (BatchNormal  (None, 64, 64, 64)  256         ['conv2_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block3_2_relu (Activatio  (None, 64, 64, 64)  0           ['conv2_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv2_block3_3_conv (Conv2D)   (None, 64, 64, 256)  16640       ['conv2_block3_2_relu[0][0]']    
                                                                                                  
 conv2_block3_3_bn (BatchNormal  (None, 64, 64, 256)  1024       ['conv2_block3_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv2_block3_add (Add)         (None, 64, 64, 256)  0           ['conv2_block2_out[0][0]',       
                                                                  'conv2_block3_3_bn[0][0]']      
                                                                                                  
 conv2_block3_out (Activation)  (None, 64, 64, 256)  0           ['conv2_block3_add[0][0]']       
                                                                                                  
 conv3_block1_1_conv (Conv2D)   (None, 32, 32, 128)  32896       ['conv2_block3_out[0][0]']       
                                                                                                  
 conv3_block1_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block1_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block1_1_relu[0][0]']    
                                                                                                  
 conv3_block1_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block1_0_conv (Conv2D)   (None, 32, 32, 512)  131584      ['conv2_block3_out[0][0]']       
                                                                                                  
 conv3_block1_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block1_2_relu[0][0]']    
                                                                                                  
 conv3_block1_0_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block1_0_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block1_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block1_add (Add)         (None, 32, 32, 512)  0           ['conv3_block1_0_bn[0][0]',      
                                                                  'conv3_block1_3_bn[0][0]']      
                                                                                                  
 conv3_block1_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block1_add[0][0]']       
                                                                                                  
 conv3_block2_1_conv (Conv2D)   (None, 32, 32, 128)  65664       ['conv3_block1_out[0][0]']       
                                                                                                  
 conv3_block2_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block2_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block2_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block2_1_relu[0][0]']    
                                                                                                  
 conv3_block2_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block2_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block2_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block2_2_relu[0][0]']    
                                                                                                  
 conv3_block2_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block2_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block2_add (Add)         (None, 32, 32, 512)  0           ['conv3_block1_out[0][0]',       
                                                                  'conv3_block2_3_bn[0][0]']      
                                                                                                  
 conv3_block2_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block2_add[0][0]']       
                                                                                                  
 conv3_block3_1_conv (Conv2D)   (None, 32, 32, 128)  65664       ['conv3_block2_out[0][0]']       
                                                                                                  
 conv3_block3_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block3_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block3_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block3_1_relu[0][0]']    
                                                                                                  
 conv3_block3_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block3_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block3_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block3_2_relu[0][0]']    
                                                                                                  
 conv3_block3_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block3_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block3_add (Add)         (None, 32, 32, 512)  0           ['conv3_block2_out[0][0]',       
                                                                  'conv3_block3_3_bn[0][0]']      
                                                                                                  
 conv3_block3_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block3_add[0][0]']       
                                                                                                  
 conv3_block4_1_conv (Conv2D)   (None, 32, 32, 128)  65664       ['conv3_block3_out[0][0]']       
                                                                                                  
 conv3_block4_1_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block4_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block4_1_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block4_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block4_2_conv (Conv2D)   (None, 32, 32, 128)  147584      ['conv3_block4_1_relu[0][0]']    
                                                                                                  
 conv3_block4_2_bn (BatchNormal  (None, 32, 32, 128)  512        ['conv3_block4_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block4_2_relu (Activatio  (None, 32, 32, 128)  0          ['conv3_block4_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv3_block4_3_conv (Conv2D)   (None, 32, 32, 512)  66048       ['conv3_block4_2_relu[0][0]']    
                                                                                                  
 conv3_block4_3_bn (BatchNormal  (None, 32, 32, 512)  2048       ['conv3_block4_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv3_block4_add (Add)         (None, 32, 32, 512)  0           ['conv3_block3_out[0][0]',       
                                                                  'conv3_block4_3_bn[0][0]']      
                                                                                                  
 conv3_block4_out (Activation)  (None, 32, 32, 512)  0           ['conv3_block4_add[0][0]']       
                                                                                                  
 conv4_block1_1_conv (Conv2D)   (None, 16, 16, 256)  131328      ['conv3_block4_out[0][0]']       
                                                                                                  
 conv4_block1_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block1_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block1_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block1_1_relu[0][0]']    
                                                                                                  
 conv4_block1_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block1_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block1_0_conv (Conv2D)   (None, 16, 16, 1024  525312      ['conv3_block4_out[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block1_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block1_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block1_0_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block1_0_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block1_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block1_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block1_add (Add)         (None, 16, 16, 1024  0           ['conv4_block1_0_bn[0][0]',      
                                )                                 'conv4_block1_3_bn[0][0]']      
                                                                                                  
 conv4_block1_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block1_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block2_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block1_out[0][0]']       
                                                                                                  
 conv4_block2_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block2_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block2_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block2_1_relu[0][0]']    
                                                                                                  
 conv4_block2_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block2_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block2_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block2_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block2_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block2_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block2_add (Add)         (None, 16, 16, 1024  0           ['conv4_block1_out[0][0]',       
                                )                                 'conv4_block2_3_bn[0][0]']      
                                                                                                  
 conv4_block2_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block2_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block3_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block2_out[0][0]']       
                                                                                                  
 conv4_block3_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block3_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block3_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block3_1_relu[0][0]']    
                                                                                                  
 conv4_block3_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block3_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block3_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block3_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block3_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block3_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block3_add (Add)         (None, 16, 16, 1024  0           ['conv4_block2_out[0][0]',       
                                )                                 'conv4_block3_3_bn[0][0]']      
                                                                                                  
 conv4_block3_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block3_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block4_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block3_out[0][0]']       
                                                                                                  
 conv4_block4_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block4_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block4_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block4_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block4_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block4_1_relu[0][0]']    
                                                                                                  
 conv4_block4_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block4_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block4_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block4_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block4_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block4_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block4_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block4_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block4_add (Add)         (None, 16, 16, 1024  0           ['conv4_block3_out[0][0]',       
                                )                                 'conv4_block4_3_bn[0][0]']      
                                                                                                  
 conv4_block4_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block4_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block5_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block4_out[0][0]']       
                                                                                                  
 conv4_block5_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block5_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block5_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block5_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block5_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block5_1_relu[0][0]']    
                                                                                                  
 conv4_block5_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block5_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block5_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block5_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block5_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block5_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block5_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block5_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block5_add (Add)         (None, 16, 16, 1024  0           ['conv4_block4_out[0][0]',       
                                )                                 'conv4_block5_3_bn[0][0]']      
                                                                                                  
 conv4_block5_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block5_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv4_block6_1_conv (Conv2D)   (None, 16, 16, 256)  262400      ['conv4_block5_out[0][0]']       
                                                                                                  
 conv4_block6_1_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block6_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block6_1_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block6_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block6_2_conv (Conv2D)   (None, 16, 16, 256)  590080      ['conv4_block6_1_relu[0][0]']    
                                                                                                  
 conv4_block6_2_bn (BatchNormal  (None, 16, 16, 256)  1024       ['conv4_block6_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv4_block6_2_relu (Activatio  (None, 16, 16, 256)  0          ['conv4_block6_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv4_block6_3_conv (Conv2D)   (None, 16, 16, 1024  263168      ['conv4_block6_2_relu[0][0]']    
                                )                                                                 
                                                                                                  
 conv4_block6_3_bn (BatchNormal  (None, 16, 16, 1024  4096       ['conv4_block6_3_conv[0][0]']    
 ization)                       )                                                                 
                                                                                                  
 conv4_block6_add (Add)         (None, 16, 16, 1024  0           ['conv4_block5_out[0][0]',       
                                )                                 'conv4_block6_3_bn[0][0]']      
                                                                                                  
 conv4_block6_out (Activation)  (None, 16, 16, 1024  0           ['conv4_block6_add[0][0]']       
                                )                                                                 
                                                                                                  
 conv5_block1_1_conv (Conv2D)   (None, 8, 8, 512)    524800      ['conv4_block6_out[0][0]']       
                                                                                                  
 conv5_block1_1_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block1_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_1_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block1_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block1_2_conv (Conv2D)   (None, 8, 8, 512)    2359808     ['conv5_block1_1_relu[0][0]']    
                                                                                                  
 conv5_block1_2_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block1_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_2_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block1_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block1_0_conv (Conv2D)   (None, 8, 8, 2048)   2099200     ['conv4_block6_out[0][0]']       
                                                                                                  
 conv5_block1_3_conv (Conv2D)   (None, 8, 8, 2048)   1050624     ['conv5_block1_2_relu[0][0]']    
                                                                                                  
 conv5_block1_0_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block1_0_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_3_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block1_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block1_add (Add)         (None, 8, 8, 2048)   0           ['conv5_block1_0_bn[0][0]',      
                                                                  'conv5_block1_3_bn[0][0]']      
                                                                                                  
 conv5_block1_out (Activation)  (None, 8, 8, 2048)   0           ['conv5_block1_add[0][0]']       
                                                                                                  
 conv5_block2_1_conv (Conv2D)   (None, 8, 8, 512)    1049088     ['conv5_block1_out[0][0]']       
                                                                                                  
 conv5_block2_1_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block2_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block2_1_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block2_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block2_2_conv (Conv2D)   (None, 8, 8, 512)    2359808     ['conv5_block2_1_relu[0][0]']    
                                                                                                  
 conv5_block2_2_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block2_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block2_2_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block2_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block2_3_conv (Conv2D)   (None, 8, 8, 2048)   1050624     ['conv5_block2_2_relu[0][0]']    
                                                                                                  
 conv5_block2_3_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block2_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block2_add (Add)         (None, 8, 8, 2048)   0           ['conv5_block1_out[0][0]',       
                                                                  'conv5_block2_3_bn[0][0]']      
                                                                                                  
 conv5_block2_out (Activation)  (None, 8, 8, 2048)   0           ['conv5_block2_add[0][0]']       
                                                                                                  
 conv5_block3_1_conv (Conv2D)   (None, 8, 8, 512)    1049088     ['conv5_block2_out[0][0]']       
                                                                                                  
 conv5_block3_1_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block3_1_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block3_1_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block3_1_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block3_2_conv (Conv2D)   (None, 8, 8, 512)    2359808     ['conv5_block3_1_relu[0][0]']    
                                                                                                  
 conv5_block3_2_bn (BatchNormal  (None, 8, 8, 512)   2048        ['conv5_block3_2_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block3_2_relu (Activatio  (None, 8, 8, 512)   0           ['conv5_block3_2_bn[0][0]']      
 n)                                                                                               
                                                                                                  
 conv5_block3_3_conv (Conv2D)   (None, 8, 8, 2048)   1050624     ['conv5_block3_2_relu[0][0]']    
                                                                                                  
 conv5_block3_3_bn (BatchNormal  (None, 8, 8, 2048)  8192        ['conv5_block3_3_conv[0][0]']    
 ization)                                                                                         
                                                                                                  
 conv5_block3_add (Add)         (None, 8, 8, 2048)   0           ['conv5_block2_out[0][0]',       
                                                                  'conv5_block3_3_bn[0][0]']      
                                                                                                  
 conv5_block3_out (Activation)  (None, 8, 8, 2048)   0           ['conv5_block3_add[0][0]']       
                                                                                                  
 average_pooling2d (AveragePool  (None, 2, 2, 2048)  0           ['conv5_block3_out[0][0]']       
 ing2D)                                                                                           
                                                                                                  
 Flatten (Flatten)              (None, 8192)         0           ['average_pooling2d[0][0]']      
                                                                                                  
 dense (Dense)                  (None, 256)          2097408     ['Flatten[0][0]']                
                                                                                                  
 dropout (Dropout)              (None, 256)          0           ['dense[0][0]']                  
                                                                                                  
 dense_1 (Dense)                (None, 256)          65792       ['dropout[0][0]']                
                                                                                                  
 dropout_1 (Dropout)            (None, 256)          0           ['dense_1[0][0]']                
                                                                                                  
 dense_2 (Dense)                (None, 2)            514         ['dropout_1[0][0]']              
                                                                                                  
==================================================================================================
Total params: 25,751,426
Trainable params: 25,698,306
Non-trainable params: 53,120
__________________________________________________________________________________________________
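The summary above reports about 25.7 million trainable parameters, so before committing to a long training run it is worth verifying that the freeze actually took effect. A minimal sanity check, assuming only the model object built above:
# Sanity check (not part of the original notebook): how much of the assembled
# model will actually be updated during training?
n_frozen = sum(1 for layer in model.layers if not layer.trainable)
print(f"frozen layers: {n_frozen} / {len(model.layers)}")
trainable_params = sum(int(tf.size(w)) for w in model.trainable_weights)
print(f"trainable parameters: {trainable_params:,}")
# With the ResNet50 base frozen, this should report only the ~2.16M parameters
# of the Dense head rather than the ~25.7M shown in the summary above.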
# Stop training if val_loss does not improve for 15 consecutive epochs.
earlystopping = EarlyStopping(monitor='val_loss',
                              mode='min',
                              verbose=1,
                              patience=15
                             )
# Save the weights whenever val_loss improves.
checkpointer = ModelCheckpoint(filepath="clf-resnet-weights.hdf5",
                               verbose=1,
                               save_best_only=True
                              )
# Shrink the learning rate to 20% of its value (factor=0.2) after 10 epochs
# without a val_loss improvement of at least min_delta.
reduce_lr = ReduceLROnPlateau(monitor='val_loss',
                              mode='min',
                              verbose=1,
                              patience=10,
                              min_delta=0.0001,
                              factor=0.2
                             )
callbacks = [checkpointer, earlystopping, reduce_lr]
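For reference, ReduceLROnPlateau rescales the learning rate multiplicatively each time val_loss plateaus. A small illustration of the resulting schedule, assuming Adam's default initial learning rate of 1e-3 (an assumption, since the optimizer is left at its defaults above):
# Illustration only: successive learning-rate reductions with factor=0.2,
# starting from the Adam default of 1e-3.
lr = 1e-3
for plateau in range(1, 4):
    lr *= 0.2
    print(f"after plateau {plateau}: lr = {lr:.1e}")
# after plateau 1: lr = 2.0e-04
# after plateau 2: lr = 4.0e-05
# after plateau 3: lr = 8.0e-06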
# Note: only the checkpoint and early-stopping callbacks are passed here;
# reduce_lr from the list above is not used in this run.
h = model.fit(train_generator,
              steps_per_epoch=train_generator.n // train_generator.batch_size,
              epochs=50,
              validation_data=valid_generator,
              validation_steps=valid_generator.n // valid_generator.batch_size,
              callbacks=[checkpointer, earlystopping])
2023-01-13 07:40:11.581318: W tensorflow/tsl/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz
Training log (187 steps per epoch, ~3 s/step, 506-543 s per epoch; weights saved to clf-resnet-weights.hdf5 whenever val_loss improved):
Epoch  1/50 - loss: 0.7085 - accuracy: 0.7278 - val_loss: 1.0771 - val_accuracy: 0.6281 - val_loss improved, model saved
Epoch  2/50 - loss: 0.4584 - accuracy: 0.7977 - val_loss: 0.8264 - val_accuracy: 0.6250 - val_loss improved, model saved
Epoch  3/50 - loss: 0.3876 - accuracy: 0.8355 - val_loss: 0.6962 - val_accuracy: 0.6219 - val_loss improved, model saved
Epoch  4/50 - loss: 0.3591 - accuracy: 0.8619 - val_loss: 1.0981 - val_accuracy: 0.6375
Epoch  5/50 - loss: 0.3425 - accuracy: 0.8726 - val_loss: 0.5964 - val_accuracy: 0.6719 - val_loss improved, model saved
Epoch  6/50 - loss: 0.3549 - accuracy: 0.8595 - val_loss: 1.0486 - val_accuracy: 0.7750
Epoch  7/50 - loss: 0.2745 - accuracy: 0.8963 - val_loss: 0.2384 - val_accuracy: 0.9187 - val_loss improved, model saved
Epoch  8/50 - loss: 0.2036 - accuracy: 0.9284 - val_loss: 0.2434 - val_accuracy: 0.9187
Epoch  9/50 - loss: 0.1963 - accuracy: 0.9251 - val_loss: 0.1717 - val_accuracy: 0.9375 - val_loss improved, model saved
Epoch 10/50 - loss: 0.2147 - accuracy: 0.9164 - val_loss: 0.4331 - val_accuracy: 0.8594
Epoch 11/50 - loss: 0.1850 - accuracy: 0.9368 - val_loss: 0.6973 - val_accuracy: 0.8750
Epoch 12/50 - loss: 0.1636 - accuracy: 0.9344 - val_loss: 0.2608 - val_accuracy: 0.8813
Epoch 13/50 - loss: 0.1713 - accuracy: 0.9411 - val_loss: 0.5337 - val_accuracy: 0.9094
Epoch 14/50 - loss: 0.1335 - accuracy: 0.9599 - val_loss: 0.1035 - val_accuracy: 0.9781 - val_loss improved, model saved
Epoch 15/50 - loss: 0.1293 - accuracy: 0.9482 - val_loss: 0.1207 - val_accuracy: 0.9594
Epoch 16/50 - loss: 0.1008 - accuracy: 0.9659 - val_loss: 0.2903 - val_accuracy: 0.9281
Epoch 17/50 - loss: 0.1004 - accuracy: 0.9629 - val_loss: 0.1218 - val_accuracy: 0.9563
Epoch 18/50 - loss: 0.0914 - accuracy: 0.9679 - val_loss: 0.1768 - val_accuracy: 0.9406
Epoch 19/50 - loss: 0.0961 - accuracy: 0.9672 - val_loss: 0.2162 - val_accuracy: 0.9031
Epoch 20/50 - loss: 0.1060 - accuracy: 0.9629 - val_loss: 0.1717 - val_accuracy: 0.9375
Epoch 21/50 - loss: 0.0834 - accuracy: 0.9753 - val_loss: 0.1652 - val_accuracy: 0.9656
Epoch 22/50 - loss: 0.0828 - accuracy: 0.9742 - val_loss: 0.2636 - val_accuracy: 0.9312
Epoch 23/50 - loss: 0.0769 - accuracy: 0.9796 - val_loss: 0.0834 - val_accuracy: 0.9750 - val_loss improved, model saved
Epoch 24/50 - loss: 0.0577 - accuracy: 0.9816 - val_loss: 0.2484 - val_accuracy: 0.9375
Epoch 25/50 - loss: 0.0802 - accuracy: 0.9719 - val_loss: 0.1346 - val_accuracy: 0.9594
Epoch 26/50 - loss: 0.0269 - accuracy: 0.9913 - val_loss: 0.1674 - val_accuracy: 0.9656
Epoch 27/50 - loss: 0.0628 - accuracy: 0.9783 - val_loss: 0.2157 - val_accuracy: 0.9406
Epoch 28/50 - loss: 0.0791 - accuracy: 0.9742 - val_loss: 0.1559 - val_accuracy: 0.9563
Epoch 29/50 - loss: 0.0532 - accuracy: 0.9846 - val_loss: 0.1498 - val_accuracy: 0.9469
Epoch 30/50 - loss: 0.0643 - accuracy: 0.9829 - val_loss: 0.1310 - val_accuracy: 0.9594
Epoch 31/50 - loss: 0.0594 - accuracy: 0.9823 - val_loss: 0.1268 - val_accuracy: 0.9563
Epoch 32/50 - loss: 0.0771 - accuracy: 0.9739 - val_loss: 0.3002 - val_accuracy: 0.9531
Epoch 33/50 - loss: 0.0742 - accuracy: 0.9783 - val_loss: 0.6265 - val_accuracy: 0.8875
Epoch 34/50 - loss: 0.0647 - accuracy: 0.9773 - val_loss: 0.1176 - val_accuracy: 0.9594
Epoch 35/50 - loss: 0.0489 - accuracy: 0.9856 - val_loss: 0.2906 - val_accuracy: 0.8969
Epoch 36/50 - loss: 0.0451 - accuracy: 0.9866 - val_loss: 0.1928 - val_accuracy: 0.9438
Epoch 37/50 - loss: 0.0451 - accuracy: 0.9846 - val_loss: 0.1868 - val_accuracy: 0.9594
Epoch 38/50 - loss: 0.0179 - accuracy: 0.9953 - val_loss: 0.1485 - val_accuracy: 0.9656
Epoch 38: early stopping (best val_loss 0.0834 at epoch 23)
# saving model architecture in json file
model_json = model.to_json()
with open("clf-resnet-model.json", "w") as json_file:
    json_file.write(model_json)
h.history.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
plt.figure(figsize=(12,5))
plt.subplot(1,2,1)
plt.plot(h.history['loss']);
plt.plot(h.history['val_loss']);
plt.title("Classification Model LOSS");
plt.ylabel("loss");
plt.xlabel("Epochs");
plt.legend(['train', 'val']);
plt.subplot(1,2,2)
plt.plot(h.history['accuracy']);
plt.plot(h.history['val_accuracy']);
plt.title("Classification Model Acc");
plt.ylabel("Accuracy");
plt.xlabel("Epochs");
plt.legend(['train', 'val']);
_, acc = model.evaluate(test_generator)
print("Test accuracy : {} %".format(acc*100))
37/37 [==============================] - 25s 674ms/step - loss: 0.2195 - accuracy: 0.9424 Test accuracy : 94.23728585243225 %
prediction = model.predict(test_generator)
pred = np.argmax(prediction, axis=1)
#pred = np.asarray(pred).astype('str')
original = np.asarray(test['mask']).astype('int')
from sklearn.metrics import accuracy_score, confusion_matrix, classification_report
accuracy = accuracy_score(original, pred)
print(accuracy)
cm = confusion_matrix(original, pred)
report = classification_report(original, pred, labels = [0,1])
print(report)
plt.figure(figsize = (5,5))
sns.heatmap(cm, annot=True);
37/37 [==============================] - 25s 663ms/step
0.9423728813559322
              precision    recall  f1-score   support
           0       0.96      0.95      0.95       378
           1       0.91      0.93      0.92       212
    accuracy                           0.94       590
   macro avg       0.94      0.94      0.94       590
weighted avg       0.94      0.94      0.94       590
brain_df_mask = brain_df[brain_df['mask'] == 1]
brain_df_mask.shape
(1373, 4)
# creating train, validation and test sets
X_train, X_val = train_test_split(brain_df_mask, test_size=0.15)
X_test, X_val = train_test_split(X_val, test_size=0.5)
print("Train size is {}, valid size is {} & test size is {}".format(len(X_train), len(X_val), len(X_test)))
train_ids = list(X_train.image_path)
train_mask = list(X_train.mask_path)
val_ids = list(X_val.image_path)
val_mask= list(X_val.mask_path)
Train size is 1167, valid size is 103 & test size is 103
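Note that the splits above are unseeded, so which patients land in each split will vary between runs. A minimal sketch, assuming reproducibility is desired (the seed value is arbitrary):
X_train, X_val = train_test_split(brain_df_mask, test_size=0.15, random_state=42)
X_test, X_val = train_test_split(X_val, test_size=0.5, random_state=42)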
class DataGenerator(tf.keras.utils.Sequence):
  def __init__(self, ids , mask, image_dir = './', batch_size = 16, img_h = 256, img_w = 256, shuffle = True):
    self.ids = ids
    self.mask = mask
    self.image_dir = image_dir
    self.batch_size = batch_size
    self.img_h = img_h
    self.img_w = img_w
    self.shuffle = shuffle
    self.on_epoch_end()
  def __len__(self):
    'Get the number of batches per epoch'
    return int(np.floor(len(self.ids) / self.batch_size))
  def __getitem__(self, index):
    'Generate a batch of data'
    #generate index of batch_size length
    indexes = self.indexes[index* self.batch_size : (index+1) * self.batch_size]
    #get the ImageId corresponding to the indexes created above based on batch size
    list_ids = [self.ids[i] for i in indexes]
    #get the MaskId corresponding to the indexes created above based on batch size
    list_mask = [self.mask[i] for i in indexes]
    #generate data for the X(features) and y(label)
    X, y = self.__data_generation(list_ids, list_mask)
    #returning the data
    return X, y
  def on_epoch_end(self):
    'Update (and optionally shuffle) the indices once at the start of training and again at the end of each epoch'
    
    #getting the array of indices based on the input dataframe
    self.indexes = np.arange(len(self.ids))
    #if shuffle is true, shuffle the indices
    if self.shuffle:
      np.random.shuffle(self.indexes)
  def __data_generation(self, list_ids, list_mask):
    'generate the data corresponding to the indexes in a given batch of images'
    # create empty arrays of shape (batch_size, height, width, depth)
    # depth is 3 for the input and 1 for the output because the mask consists of a single channel
    X = np.empty((self.batch_size, self.img_h, self.img_w, 3))
    y = np.empty((self.batch_size, self.img_h, self.img_w, 1))
    #iterate through the dataframe rows, whose size is equal to the batch_size
    for i in range(len(list_ids)):
      #path of the image
      img_path = str(list_ids[i])
      
      #mask path
      mask_path = str(list_mask[i])
      
      #reading the original image and the corresponding mask image
      img = io.imread(img_path)
      mask = io.imread(mask_path)
      #resizing and converting them to arrays of type float64
      img = cv2.resize(img,(self.img_h,self.img_w))
      img = np.array(img, dtype = np.float64)
      
      mask = cv2.resize(mask,(self.img_h,self.img_w))
      mask = np.array(mask, dtype = np.float64)
      #standardising 
      img -= img.mean()
      img /= img.std()
      
      mask -= mask.mean()
      mask /= mask.std()
      
      #Adding image to the empty array
      X[i,] = img
      
      #expanding the dimension of the mask from (256,256) to (256,256,1)
      y[i,] = np.expand_dims(mask, axis = 2)
    
    #binarizing y: pixels above the standardized mean become 1, the rest 0
    y = (y > 0).astype(int)
    return X, y
train_data = DataGenerator(train_ids, train_mask)
val_data = DataGenerator(val_ids, val_mask)
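As a quick, illustrative sanity check, we can pull one batch from the generator and confirm that images come out as (batch_size, 256, 256, 3) and masks as binary (batch_size, 256, 256, 1) arrays:
# illustrative check of one batch from the custom generator
X_batch, y_batch = train_data[0]
print(X_batch.shape, y_batch.shape)  # expected: (16, 256, 256, 3) (16, 256, 256, 1)
print(y_batch.min(), y_batch.max())  # expected: 0 1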
It's been a while since the inception of U-Net. The network was initially designed for medical image segmentation, but it has since been used for all sorts of segmentation tasks. In my limited experience, segmentation tends to work better than object detection (given that you have the labeled data), probably because segmentation learns to classify each pixel as part of the object or not, whereas detection has to regress the four coordinates of a box around the object, a much harder and more error-prone optimization target.

To understand the architecture of U-Net, let's first understand the task. Given an input image, the network should generate a segmentation mask, meaning every pixel is classified as belonging to the desired object or not. The idea behind U-Net is that if we feed the image to an encoder that keeps shrinking the spatial size of the feature maps, then after sufficient training the network learns to keep only the important features and discard the less useful ones; the encoder output, passed through a decoder, then produces the desired mask. The problem is that the decoder layers do not get enough context from the encoder output alone to reconstruct a precise segmentation mask.
The key idea introduced in the U-Net paper to solve this context issue was to add skip connections from the encoder to the decoder, taken just before each down-sampling step. In the U-Net architecture, the spatial resolution is halved after every pair of Conv blocks, and from each pair there is a skip connection that carries the encoder features across and concatenates them with the corresponding decoder features, giving the decoder enough context to generate a proper segmentation mask. If we replace the concatenation with element-wise addition we get LinkNet, which performs similarly to U-Net (in some cases even beating it); a short sketch of the two merge styles is given below.

ResUNet is an interesting idea that combines the performance gain of residual networks with the U-Net. Its architecture is built below. In my testing, I've found it to be a very capable network, though with a slightly larger number of parameters.
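As a rough, illustrative sketch of the two merge styles mentioned above (not part of the model built below): a U-Net-style decoder step concatenates the upsampled features with the encoder features, while a LinkNet-style step adds them element-wise.
from tensorflow.keras.layers import UpSampling2D, Concatenate, Add

def unet_merge(x, skip):
    # U-Net style: concatenate encoder features along the channel axis
    return Concatenate()([UpSampling2D((2,2))(x), skip])

def linknet_merge(x, skip):
    # LinkNet style: element-wise addition (assumes x and skip have the same number of channels)
    return Add()([UpSampling2D((2,2))(x), skip])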
# let's create the model now
def resblock(X, f):
    '''
    function for creating res block
    '''
    X_copy = X  #copy of input
    
    # main path
    X = Conv2D(f, kernel_size=(1,1), kernel_initializer='he_normal')(X)
    X = BatchNormalization()(X)
    X = Activation('relu')(X)
    
    X = Conv2D(f, kernel_size=(3,3), padding='same', kernel_initializer='he_normal')(X)
    X = BatchNormalization()(X)
    
    # shortcut path
    X_copy = Conv2D(f, kernel_size=(1,1), kernel_initializer='he_normal')(X_copy)
    X_copy = BatchNormalization()(X_copy)
    
    # adding the outputs of the main path and the shortcut path
    X = Add()([X, X_copy])
    X = Activation('relu')(X)
    
    return X
def upsample_concat(x, skip):
    '''
    function for upsampling and concatenating with the skip connection
    '''
    X = UpSampling2D((2,2))(x)
    merge = Concatenate()([X, skip])
    
    return merge
input_shape = (256,256,3)
X_input = Input(input_shape)  # instantiating the input tensor
# Stage 1
conv_1 = Conv2D(16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(X_input)
conv_1 = BatchNormalization()(conv_1)
conv_1 = Conv2D(16, 3, activation='relu', padding='same', kernel_initializer='he_normal')(conv_1)
conv_1 = BatchNormalization()(conv_1)
pool_1 = MaxPool2D((2,2))(conv_1)
# stage 2
conv_2 = resblock(pool_1, 32)
pool_2 = MaxPool2D((2,2))(conv_2)
# Stage 3
conv_3 = resblock(pool_2, 64)
pool_3 = MaxPool2D((2,2))(conv_3)
# Stage 4
conv_4 = resblock(pool_3, 128)
pool_4 = MaxPool2D((2,2))(conv_4)
# Stage 5 (bottleneck)
conv_5 = resblock(pool_4, 256)
# Upsample Stage 1
up_1 = upsample_concat(conv_5, conv_4)
up_1 = resblock(up_1, 128)
# Upsample Stage 2
up_2 = upsample_concat(up_1, conv_3)
up_2 = resblock(up_2, 64)
# Upsample Stage 3
up_3 = upsample_concat(up_2, conv_2)
up_3 = resblock(up_3, 32)
# Upsample Stage 4
up_4 = upsample_concat(up_3, conv_1)
up_4 = resblock(up_4, 16)
# final output
out = Conv2D(1, (1,1), kernel_initializer='he_normal', padding='same', activation='sigmoid')(up_4)
seg_model = Model(X_input, out)
seg_model.summary()
Model: "model_1"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to                     
==================================================================================================
 input_2 (InputLayer)           [(None, 256, 256, 3  0           []                               
                                )]                                                                
                                                                                                  
 conv2d (Conv2D)                (None, 256, 256, 16  448         ['input_2[0][0]']                
                                )                                                                 
                                                                                                  
 batch_normalization (BatchNorm  (None, 256, 256, 16  64         ['conv2d[0][0]']                 
 alization)                     )                                                                 
                                                                                                  
 conv2d_1 (Conv2D)              (None, 256, 256, 16  2320        ['batch_normalization[0][0]']    
                                )                                                                 
                                                                                                  
 batch_normalization_1 (BatchNo  (None, 256, 256, 16  64         ['conv2d_1[0][0]']               
 rmalization)                   )                                                                 
                                                                                                  
 max_pooling2d (MaxPooling2D)   (None, 128, 128, 16  0           ['batch_normalization_1[0][0]']  
                                )                                                                 
                                                                                                  
 conv2d_2 (Conv2D)              (None, 128, 128, 32  544         ['max_pooling2d[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_2 (BatchNo  (None, 128, 128, 32  128        ['conv2d_2[0][0]']               
 rmalization)                   )                                                                 
                                                                                                  
 activation (Activation)        (None, 128, 128, 32  0           ['batch_normalization_2[0][0]']  
                                )                                                                 
                                                                                                  
 conv2d_3 (Conv2D)              (None, 128, 128, 32  9248        ['activation[0][0]']             
                                )                                                                 
                                                                                                  
 conv2d_4 (Conv2D)              (None, 128, 128, 32  544         ['max_pooling2d[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_3 (BatchNo  (None, 128, 128, 32  128        ['conv2d_3[0][0]']               
 rmalization)                   )                                                                 
                                                                                                  
 batch_normalization_4 (BatchNo  (None, 128, 128, 32  128        ['conv2d_4[0][0]']               
 rmalization)                   )                                                                 
                                                                                                  
 add (Add)                      (None, 128, 128, 32  0           ['batch_normalization_3[0][0]',  
                                )                                 'batch_normalization_4[0][0]']  
                                                                                                  
 activation_1 (Activation)      (None, 128, 128, 32  0           ['add[0][0]']                    
                                )                                                                 
                                                                                                  
 max_pooling2d_1 (MaxPooling2D)  (None, 64, 64, 32)  0           ['activation_1[0][0]']           
                                                                                                  
 conv2d_5 (Conv2D)              (None, 64, 64, 64)   2112        ['max_pooling2d_1[0][0]']        
                                                                                                  
 batch_normalization_5 (BatchNo  (None, 64, 64, 64)  256         ['conv2d_5[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_2 (Activation)      (None, 64, 64, 64)   0           ['batch_normalization_5[0][0]']  
                                                                                                  
 conv2d_6 (Conv2D)              (None, 64, 64, 64)   36928       ['activation_2[0][0]']           
                                                                                                  
 conv2d_7 (Conv2D)              (None, 64, 64, 64)   2112        ['max_pooling2d_1[0][0]']        
                                                                                                  
 batch_normalization_6 (BatchNo  (None, 64, 64, 64)  256         ['conv2d_6[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 batch_normalization_7 (BatchNo  (None, 64, 64, 64)  256         ['conv2d_7[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 add_1 (Add)                    (None, 64, 64, 64)   0           ['batch_normalization_6[0][0]',  
                                                                  'batch_normalization_7[0][0]']  
                                                                                                  
 activation_3 (Activation)      (None, 64, 64, 64)   0           ['add_1[0][0]']                  
                                                                                                  
 max_pooling2d_2 (MaxPooling2D)  (None, 32, 32, 64)  0           ['activation_3[0][0]']           
                                                                                                  
 conv2d_8 (Conv2D)              (None, 32, 32, 128)  8320        ['max_pooling2d_2[0][0]']        
                                                                                                  
 batch_normalization_8 (BatchNo  (None, 32, 32, 128)  512        ['conv2d_8[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_4 (Activation)      (None, 32, 32, 128)  0           ['batch_normalization_8[0][0]']  
                                                                                                  
 conv2d_9 (Conv2D)              (None, 32, 32, 128)  147584      ['activation_4[0][0]']           
                                                                                                  
 conv2d_10 (Conv2D)             (None, 32, 32, 128)  8320        ['max_pooling2d_2[0][0]']        
                                                                                                  
 batch_normalization_9 (BatchNo  (None, 32, 32, 128)  512        ['conv2d_9[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 batch_normalization_10 (BatchN  (None, 32, 32, 128)  512        ['conv2d_10[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_2 (Add)                    (None, 32, 32, 128)  0           ['batch_normalization_9[0][0]',  
                                                                  'batch_normalization_10[0][0]'] 
                                                                                                  
 activation_5 (Activation)      (None, 32, 32, 128)  0           ['add_2[0][0]']                  
                                                                                                  
 max_pooling2d_3 (MaxPooling2D)  (None, 16, 16, 128)  0          ['activation_5[0][0]']           
                                                                                                  
 conv2d_11 (Conv2D)             (None, 16, 16, 256)  33024       ['max_pooling2d_3[0][0]']        
                                                                                                  
 batch_normalization_11 (BatchN  (None, 16, 16, 256)  1024       ['conv2d_11[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_6 (Activation)      (None, 16, 16, 256)  0           ['batch_normalization_11[0][0]'] 
                                                                                                  
 conv2d_12 (Conv2D)             (None, 16, 16, 256)  590080      ['activation_6[0][0]']           
                                                                                                  
 conv2d_13 (Conv2D)             (None, 16, 16, 256)  33024       ['max_pooling2d_3[0][0]']        
                                                                                                  
 batch_normalization_12 (BatchN  (None, 16, 16, 256)  1024       ['conv2d_12[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 batch_normalization_13 (BatchN  (None, 16, 16, 256)  1024       ['conv2d_13[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_3 (Add)                    (None, 16, 16, 256)  0           ['batch_normalization_12[0][0]', 
                                                                  'batch_normalization_13[0][0]'] 
                                                                                                  
 activation_7 (Activation)      (None, 16, 16, 256)  0           ['add_3[0][0]']                  
                                                                                                  
 up_sampling2d (UpSampling2D)   (None, 32, 32, 256)  0           ['activation_7[0][0]']           
                                                                                                  
 concatenate (Concatenate)      (None, 32, 32, 384)  0           ['up_sampling2d[0][0]',          
                                                                  'activation_5[0][0]']           
                                                                                                  
 conv2d_14 (Conv2D)             (None, 32, 32, 128)  49280       ['concatenate[0][0]']            
                                                                                                  
 batch_normalization_14 (BatchN  (None, 32, 32, 128)  512        ['conv2d_14[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_8 (Activation)      (None, 32, 32, 128)  0           ['batch_normalization_14[0][0]'] 
                                                                                                  
 conv2d_15 (Conv2D)             (None, 32, 32, 128)  147584      ['activation_8[0][0]']           
                                                                                                  
 conv2d_16 (Conv2D)             (None, 32, 32, 128)  49280       ['concatenate[0][0]']            
                                                                                                  
 batch_normalization_15 (BatchN  (None, 32, 32, 128)  512        ['conv2d_15[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 batch_normalization_16 (BatchN  (None, 32, 32, 128)  512        ['conv2d_16[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_4 (Add)                    (None, 32, 32, 128)  0           ['batch_normalization_15[0][0]', 
                                                                  'batch_normalization_16[0][0]'] 
                                                                                                  
 activation_9 (Activation)      (None, 32, 32, 128)  0           ['add_4[0][0]']                  
                                                                                                  
 up_sampling2d_1 (UpSampling2D)  (None, 64, 64, 128)  0          ['activation_9[0][0]']           
                                                                                                  
 concatenate_1 (Concatenate)    (None, 64, 64, 192)  0           ['up_sampling2d_1[0][0]',        
                                                                  'activation_3[0][0]']           
                                                                                                  
 conv2d_17 (Conv2D)             (None, 64, 64, 64)   12352       ['concatenate_1[0][0]']          
                                                                                                  
 batch_normalization_17 (BatchN  (None, 64, 64, 64)  256         ['conv2d_17[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_10 (Activation)     (None, 64, 64, 64)   0           ['batch_normalization_17[0][0]'] 
                                                                                                  
 conv2d_18 (Conv2D)             (None, 64, 64, 64)   36928       ['activation_10[0][0]']          
                                                                                                  
 conv2d_19 (Conv2D)             (None, 64, 64, 64)   12352       ['concatenate_1[0][0]']          
                                                                                                  
 batch_normalization_18 (BatchN  (None, 64, 64, 64)  256         ['conv2d_18[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 batch_normalization_19 (BatchN  (None, 64, 64, 64)  256         ['conv2d_19[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_5 (Add)                    (None, 64, 64, 64)   0           ['batch_normalization_18[0][0]', 
                                                                  'batch_normalization_19[0][0]'] 
                                                                                                  
 activation_11 (Activation)     (None, 64, 64, 64)   0           ['add_5[0][0]']                  
                                                                                                  
 up_sampling2d_2 (UpSampling2D)  (None, 128, 128, 64  0          ['activation_11[0][0]']          
                                )                                                                 
                                                                                                  
 concatenate_2 (Concatenate)    (None, 128, 128, 96  0           ['up_sampling2d_2[0][0]',        
                                )                                 'activation_1[0][0]']           
                                                                                                  
 conv2d_20 (Conv2D)             (None, 128, 128, 32  3104        ['concatenate_2[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_20 (BatchN  (None, 128, 128, 32  128        ['conv2d_20[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 activation_12 (Activation)     (None, 128, 128, 32  0           ['batch_normalization_20[0][0]'] 
                                )                                                                 
                                                                                                  
 conv2d_21 (Conv2D)             (None, 128, 128, 32  9248        ['activation_12[0][0]']          
                                )                                                                 
                                                                                                  
 conv2d_22 (Conv2D)             (None, 128, 128, 32  3104        ['concatenate_2[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_21 (BatchN  (None, 128, 128, 32  128        ['conv2d_21[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 batch_normalization_22 (BatchN  (None, 128, 128, 32  128        ['conv2d_22[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_6 (Add)                    (None, 128, 128, 32  0           ['batch_normalization_21[0][0]', 
                                )                                 'batch_normalization_22[0][0]'] 
                                                                                                  
 activation_13 (Activation)     (None, 128, 128, 32  0           ['add_6[0][0]']                  
                                )                                                                 
                                                                                                  
 up_sampling2d_3 (UpSampling2D)  (None, 256, 256, 32  0          ['activation_13[0][0]']          
                                )                                                                 
                                                                                                  
 concatenate_3 (Concatenate)    (None, 256, 256, 48  0           ['up_sampling2d_3[0][0]',        
                                )                                 'batch_normalization_1[0][0]']  
                                                                                                  
 conv2d_23 (Conv2D)             (None, 256, 256, 16  784         ['concatenate_3[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_23 (BatchN  (None, 256, 256, 16  64         ['conv2d_23[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 activation_14 (Activation)     (None, 256, 256, 16  0           ['batch_normalization_23[0][0]'] 
                                )                                                                 
                                                                                                  
 conv2d_24 (Conv2D)             (None, 256, 256, 16  2320        ['activation_14[0][0]']          
                                )                                                                 
                                                                                                  
 conv2d_25 (Conv2D)             (None, 256, 256, 16  784         ['concatenate_3[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_24 (BatchN  (None, 256, 256, 16  64         ['conv2d_24[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 batch_normalization_25 (BatchN  (None, 256, 256, 16  64         ['conv2d_25[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_7 (Add)                    (None, 256, 256, 16  0           ['batch_normalization_24[0][0]', 
                                )                                 'batch_normalization_25[0][0]'] 
                                                                                                  
 activation_15 (Activation)     (None, 256, 256, 16  0           ['add_7[0][0]']                  
                                )                                                                 
                                                                                                  
 conv2d_26 (Conv2D)             (None, 256, 256, 1)  17          ['activation_15[0][0]']          
                                                                                                  
==================================================================================================
Total params: 1,210,513
Trainable params: 1,206,129
Non-trainable params: 4,384
__________________________________________________________________________________________________
from keras.losses import binary_crossentropy
epsilon = 1e-5
smooth = 1
def tversky(y_true, y_pred):
    y_true_pos = K.flatten(y_true)
    y_pred_pos = K.flatten(y_pred)
    true_pos = K.sum(y_true_pos * y_pred_pos)
    false_neg = K.sum(y_true_pos * (1-y_pred_pos))
    false_pos = K.sum((1-y_true_pos)*y_pred_pos)
    alpha = 0.7
    return (true_pos + smooth)/(true_pos + alpha*false_neg + (1-alpha)*false_pos + smooth)
def focal_tversky(y_true,y_pred):
    y_true = tf.cast(y_true, tf.float32)
    y_pred = tf.cast(y_pred, tf.float32)
    
    pt_1 = tversky(y_true, y_pred)
    gamma = 0.75
    return K.pow((1-pt_1), gamma)
def tversky_loss(y_true, y_pred):
    return 1 - tversky(y_true,y_pred)
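As a tiny, illustrative check of these functions on toy tensors (the values below are made up): with one true positive, one false negative and no false positives, the Tversky index is (1 + 1) / (1 + 0.7 + 1) ≈ 0.74 and the focal Tversky loss is (1 - 0.74)^0.75 ≈ 0.36.
# toy sanity check of the Tversky metric and focal Tversky loss (illustrative values only)
y_true_toy = tf.constant([[1., 1., 0., 0.]])  # two positive pixels
y_pred_toy = tf.constant([[1., 0., 0., 0.]])  # one hit, one miss
print(float(tversky(y_true_toy, y_pred_toy)))        # ~0.74
print(float(focal_tversky(y_true_toy, y_pred_toy)))  # ~0.36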
# compiling the model and defining callback functions
adam = tf.keras.optimizers.Adam(lr = 0.05, epsilon = 0.1)
seg_model.compile(optimizer = adam, 
                  loss = focal_tversky, 
                  metrics = [tversky]
                 )
#callbacks
earlystopping = EarlyStopping(monitor='val_loss',
                              mode='min', 
                              verbose=1, 
                              patience=20
                             )
# save the best model with lower validation loss
checkpointer = ModelCheckpoint(filepath="ResUNet-segModel-weights.hdf5", 
                               verbose=1, 
                               save_best_only=True
                              )
reduce_lr = ReduceLROnPlateau(monitor='val_loss',
                              mode='min',
                              verbose=1,
                              patience=10,
                              min_delta=0.0001,
                              factor=0.2
                             )
WARNING:absl:`lr` is deprecated, please use `learning_rate` instead, or use the legacy optimizer, e.g.,tf.keras.optimizers.legacy.Adam.
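The warning above refers to the deprecated `lr` keyword passed to the Adam constructor; the equivalent call with the current keyword (same values) would be:
adam = tf.keras.optimizers.Adam(learning_rate=0.05, epsilon=0.1)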
h = seg_model.fit(train_data, 
                  epochs = 60, 
                  validation_data = val_data,
                  callbacks = [checkpointer, earlystopping, reduce_lr]
                 )
Epoch 1/60:  loss: 0.9108 - tversky: 0.1171 - val_loss: 0.9050 - val_tversky: 0.1246 (val_loss improved from inf, saving model to ResUNet-segModel-weights.hdf5)
Epoch 10/60: loss: 0.8258 - tversky: 0.2250 - val_loss: 0.8161 - val_tversky: 0.2373 (val_loss improved)
Epoch 20/60: loss: 0.5144 - tversky: 0.5859 - val_loss: 0.4897 - val_tversky: 0.6105 (val_loss improved)
Epoch 30/60: loss: 0.3718 - tversky: 0.7309 - val_loss: 0.3735 - val_tversky: 0.7302 (val_loss improved)
Epoch 40/60: loss: 0.2943 - tversky: 0.8029 - val_loss: 0.3303 - val_tversky: 0.7698 (val_loss did not improve)
Epoch 50/60: loss: 0.2421 - tversky: 0.8481 - val_loss: 0.2680 - val_tversky: 0.8271 (val_loss improved)
Epoch 60/60: loss: 0.2145 - tversky: 0.8710 - val_loss: 0.2369 - val_tversky: 0.8520 (val_loss improved to 0.23691, best checkpoint)
[intermediate epochs are omitted; val_loss improved fairly steadily throughout, each epoch took roughly 115-190 s at 2-3 s/step, lr fixed at 0.0010]
# saving model architecture to a json file
seg_model_json = seg_model.to_json()
with open("ResUNet-seg-model.json", "w") as json_file:
    json_file.write(seg_model_json)
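To reuse the trained segmentation model later without retraining, the saved JSON architecture and checkpointed weights can be loaded back. A minimal sketch, assuming the custom focal_tversky loss and tversky metric defined earlier in the notebook are still in scope (the optimizer shown here is only illustrative):
from tensorflow.keras.models import model_from_json

# rebuild the architecture from the saved JSON file
with open("ResUNet-seg-model.json", "r") as json_file:
    restored_seg_model = model_from_json(json_file.read())

# load the best weights saved by the ModelCheckpoint callback
restored_seg_model.load_weights("ResUNet-segModel-weights.hdf5")

# re-compile with the same custom loss/metric before evaluating or fine-tuning
restored_seg_model.compile(optimizer='adam', loss=focal_tversky, metrics=[tversky])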
h.history.keys()
dict_keys(['loss', 'tversky', 'val_loss', 'val_tversky', 'lr'])
plt.figure(figsize=(12,5))
plt.subplot(1,2,1)
plt.plot(h.history['loss']);
plt.plot(h.history['val_loss']);
plt.title("SEG Model focal tversky Loss");
plt.ylabel("focal tversky loss");
plt.xlabel("Epochs");
plt.legend(['train', 'val']);
plt.subplot(1,2,2)
plt.plot(h.history['tversky']);
plt.plot(h.history['val_tversky']);
plt.title("SEG Model tversky score");
plt.ylabel("tversky Accuracy");
plt.xlabel("Epochs");
plt.legend(['train', 'val']);
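The history object only lives for the current session, so it can be handy to persist the per-epoch metrics; a small sketch (the CSV file name is an arbitrary choice):
# save the per-epoch metrics so the curves can be re-plotted without retraining
hist_df = pd.DataFrame(h.history)
hist_df.to_csv("ResUNet-seg-training-history.csv", index=False)
# reload later with pd.read_csv("ResUNet-seg-training-history.csv")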
test_ids = list(X_test.image_path)
test_mask = list(X_test.mask_path)
test_data = DataGenerator(test_ids, test_mask)
_, tv = seg_model.evaluate(test_data)
print("Segmentation tversky is {:.2f}%".format(tv*100))
6/6 [==============================] - 3s 570ms/step - loss: 0.2409 - tversky: 0.8498 Segmentation tversky is 84.98%
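For intuition about the score reported above, the Tversky index can be computed for a single pair of binary masks with plain NumPy. A sketch assuming the usual weighting of false negatives by alpha and false positives by (1 - alpha); the exact alpha and smoothing values used by the notebook's tversky metric may differ:
def tversky_index_np(y_true, y_pred, alpha=0.7, smooth=1e-6):
    # Tversky index for a pair of binary masks (1 = tumour pixel)
    y_true = y_true.astype(np.float64).ravel()
    y_pred = y_pred.astype(np.float64).ravel()
    tp = np.sum(y_true * y_pred)            # correctly predicted tumour pixels
    fn = np.sum(y_true * (1 - y_pred))      # missed tumour pixels
    fp = np.sum((1 - y_true) * y_pred)      # false alarms
    return (tp + smooth) / (tp + alpha * fn + (1 - alpha) * fp + smooth)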
def prediction(test, model, model_seg):
    '''
    Prediction function: takes a dataframe of image paths as input and runs a two-stage prediction on each image.
    First, the image is passed through the classification network, which predicts whether a tumour is present.
    If the classifier predicts no tumour, the image is labelled as having no mask. Otherwise, the image is passed
    to the segmentation network; if the predicted mask is empty the image is still labelled tumour-free, otherwise
    the predicted mask gives the location of the tumour.
    '''
    # empty list to store results
    mask, image_id, has_mask = [], [], []
    
    #iterating through each image in the test data
    for i in test.image_path:
        
        img = io.imread(i)
        #normalizing
        img = img *1./255.
        #reshaping
        img = cv2.resize(img, (256,256))
        # converting img into array
        img = np.array(img, dtype=np.float64)
        #reshaping the image from 256,256,3 to 1,256,256,3
        img = np.reshape(img, (1,256,256,3))
        
        #making prediction for tumor in image
        is_defect = model.predict(img)
        
        #if tumour is not present we append the details of the image to the list
        if np.argmax(is_defect)==0:
            image_id.append(i)
            has_mask.append(0)
            mask.append('No mask :)')
            continue
        
        #Creating an empty array of shape 1,256,256,3
        X = np.empty((1,256,256,3))
        # read the image
        img = io.imread(i)
        #resizing the image and converting it to an array of type float64
        img = cv2.resize(img, (256,256))
        img = np.array(img, dtype=np.float64)
        
        # standardising the image
        img -= img.mean()
        img /= img.std()
        #placing the 256,256,3 image into the 1,256,256,3 batch array
        X[0,] = img
        
        #make prediction of mask
        predict = model_seg.predict(X)
        
        # if the sum of the predicted mask is 0 then there is no tumour
        if predict.round().astype(int).sum()==0:
            image_id.append(i)
            has_mask.append(0)
            mask.append('No mask :)')
        else:
            # if the sum of the predicted pixel values is greater than 0, there is a tumour
            image_id.append(i)
            has_mask.append(1)
            mask.append(predict)
            
    return pd.DataFrame({'image_path': image_id,'predicted_mask': mask,'has_mask': has_mask})
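Note that calling model.predict once per image, as above, is slow and is what produces the long progress-bar output below. An alternative sketch that preprocesses every test image into one array and classifies them in a single batched call; the helper name and batch size are illustrative, and the preprocessing mirrors the 256x256 scaling used above:
def batched_classifier_scores(test, model, batch_size=32):
    # stack all test images into one (N,256,256,3) array using the same 1/255 scaling
    imgs = np.empty((len(test), 256, 256, 3), dtype=np.float64)
    for idx, path in enumerate(test.image_path):
        img = io.imread(path)
        img = cv2.resize(img, (256, 256))
        imgs[idx] = img * 1./255.
    # one predict call over the whole batch; verbose=0 suppresses the progress bars
    return model.predict(imgs, batch_size=batch_size, verbose=0)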
# making prediction
df_pred = prediction(test, model, seg_model)
df_pred
1/1 [==============================] - ... ms/step  (per-image predict progress bars, repeated once for each of the 590 test images; timings omitted)
| | image_path | predicted_mask | has_mask |
|---|---|---|---|
| 0 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | No mask :) | 0 | 
| 1 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | [[[[0.00024032], [9.582487e-05], [0.00015765],... | 1 | 
| 2 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | No mask :) | 0 | 
| 3 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | [[[[0.00018928], [7.79208e-05], [0.00012878], ... | 1 | 
| 4 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | No mask :) | 0 | 
| ... | ... | ... | ... | 
| 585 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | No mask :) | 0 | 
| 586 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | No mask :) | 0 | 
| 587 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | No mask :) | 0 | 
| 588 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | [[[[0.00018134], [7.214157e-05], [0.00012422],... | 1 | 
| 589 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | [[[[0.00019356], [7.524462e-05], [0.00012682],... | 1 | 
590 rows × 3 columns
# merging original and prediction df
df_pred = test.merge(df_pred, on='image_path')
df_pred.head(10)
| | image_path | mask_path | mask | predicted_mask | has_mask |
|---|---|---|---|---|---|
| 0 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | No mask :) | 0 | 
| 1 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 1 | [[[[0.00024032], [9.582487e-05], [0.00015765],... | 1 | 
| 2 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | No mask :) | 0 | 
| 3 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 1 | [[[[0.00018928], [7.79208e-05], [0.00012878], ... | 1 | 
| 4 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | No mask :) | 0 | 
| 5 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | No mask :) | 0 | 
| 6 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 1 | [[[[0.00015732], [7.119028e-05], [0.00012219],... | 1 | 
| 7 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 1 | [[[[0.0001833], [7.1754126e-05], [0.00011429],... | 1 | 
| 8 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | [[[[0.00032904], [0.00014852], [0.00015443], [... | 1 | 
| 9 | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | brain_dataset/lgg-mri-segmentation/kaggle_3m/T... | 0 | No mask :) | 0 | 
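With the ground-truth flag (mask) and the pipeline's decision (has_mask) now side by side, a quick agreement check is possible; a minimal sketch, assuming mask holds 0/1 labels as shown in the table:
# fraction of test images where the two-stage pipeline agrees with the ground-truth label
agreement = (df_pred['mask'].astype(int) == df_pred['has_mask'].astype(int)).mean()
print("Pipeline vs. ground truth agreement: {:.2f}%".format(agreement * 100))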
#visualizing predictions
count = 0
fig, axs = plt.subplots(15,5, figsize=(30,70))
for i in range(len(df_pred)):
    if df_pred.has_mask[i]==1 and count<15:
        #read mri images
        img = io.imread(df_pred.image_path[i])
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        axs[count][0].imshow(img)
        axs[count][0].title.set_text('Brain MRI')
        
        #read original mask
        mask = io.imread(df_pred.mask_path[i])
        axs[count][1].imshow(mask)
        axs[count][1].title.set_text('Original Mask')
        
        #read predicted mask
        pred = np.array(df_pred.predicted_mask[i]).squeeze().round()
        axs[count][2].imshow(pred)
        axs[count][2].title.set_text('AI predicted mask')
        
        #overlay original mask with MRI
        img[mask==255] = (255,0,0)
        axs[count][3].imshow(img)
        axs[count][3].title.set_text('Brain MRI with original mask (Ground Truth)')
        
        #overlay predicted mask and MRI
        img_ = io.imread(df_pred.image_path[i])
        img_ = cv2.cvtColor(img_, cv2.COLOR_BGR2RGB)
        img_[pred==1] = (0,255,150)
        axs[count][4].imshow(img_)
        axs[count][4].title.set_text('MRI with AI predicted mask')
        
        count +=1
    if (count==15):
        break
fig.tight_layout()        
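The overlays above overwrite the pixels inside the mask region; if a softer, semi-transparent overlay is preferred, cv2.addWeighted can blend the mask colour with the original MRI. A sketch where the colour and opacity are arbitrary choices:
def overlay_mask(img, mask, color=(0, 255, 150), alpha=0.4):
    # paint the mask region on a copy, then blend it back onto the original image
    overlay = img.copy()
    overlay[mask > 0] = color
    return cv2.addWeighted(overlay, alpha, img, 1 - alpha, 0)
# e.g. axs[count][4].imshow(overlay_mask(img_, pred)) shows a translucent predicted mask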
The predictions made by the model closely match the ground-truth masks :)