%%capture
!pip install segmentation-models-pytorch
!pip install -U git+https://github.com/albumentations-team/albumentations
!pip install --upgrade opencv-contrib-python
Original author of the dataset: https://github.com/VikramShenoy97/Human-Segmentation-Dataset
!git clone https://github.com/parth1620/Human-Segmentation-Dataset-master.git
fatal: destination path 'Human-Segmentation-Dataset-master' already exists and is not an empty directory.
import sys
sys.path.append('/content/Human-Segmentation-Dataset-master')
import torch
import cv2
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from tqdm import tqdm
from skimage import io
import helper
from torch.utils.data import Dataset , DataLoader
from torchvision.transforms import v2
from torch import nn
import random
import os
df = pd.read_csv('/kaggle/working/Human-Segmentation-Dataset-master/train.csv')
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
DATA_DIR = "/kaggle/working/"
IMG_SIZE = 224
class CFG:
    epochs = 100
    seed = 42
    batch_size = 16
    encoder = 'efficientnet-b2'
    encoder_weights = "imagenet"
def seed_torch(seed=42):
random.seed(seed)
os.environ['PYTHONHASHSEED'] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.backends.cudnn.deterministic = True
seed_torch(seed=CFG.seed)
df.head()
|   | masks | images |
|---|---|---|
| 0 | Human-Segmentation-Dataset-master/Ground_Truth... | Human-Segmentation-Dataset-master/Training_Ima... |
| 1 | Human-Segmentation-Dataset-master/Ground_Truth... | Human-Segmentation-Dataset-master/Training_Ima... |
| 2 | Human-Segmentation-Dataset-master/Ground_Truth... | Human-Segmentation-Dataset-master/Training_Ima... |
| 3 | Human-Segmentation-Dataset-master/Ground_Truth... | Human-Segmentation-Dataset-master/Training_Ima... |
| 4 | Human-Segmentation-Dataset-master/Ground_Truth... | Human-Segmentation-Dataset-master/Training_Ima... |
df.shape
(290, 2)
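A quick sanity check (my own addition, not part of the original run): every mask/image path listed in the CSV should resolve under DATA_DIR.
missing = [p for col in ['masks', 'images'] for p in df[col] if not os.path.exists(DATA_DIR + p)]
print(f"missing files: {len(missing)}")  # expected: 0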
train_df , val_df = train_test_split(df , test_size= 0.2 , random_state= CFG.seed)
def show_data(idx):
    # the CSV columns are (masks, images), so unpack in that order
    mask, image = DATA_DIR + df.iloc[idx]
    image = io.imread(image)
    mask = io.imread(mask)
    f, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 5))
    ax1.set_title('IMAGE')
    ax1.imshow(image)
    ax2.set_title('GROUND TRUTH')
    ax2.imshow(mask, cmap='gray')
show_data(99)
Albumentations documentation: https://albumentations.ai/docs/
import albumentations as A
def get_train_augs():
    # a few image/mask pairs in this dataset have mismatched shapes
    # (see train_set.show_data(9) further down), so shape checking is disabled
    return A.Compose([A.Resize(height=IMG_SIZE, width=IMG_SIZE),
                      A.Rotate(limit=30),
                      A.HorizontalFlip(p=0.5),
                      A.VerticalFlip(p=0.5)],
                     is_check_shapes=False)

def get_val_augs():
    return A.Compose([A.Resize(height=IMG_SIZE, width=IMG_SIZE)],
                     is_check_shapes=False)
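To see what the pipeline produces, here is a small hedged check (my own sketch, assuming the first row of df is readable): when image and mask are passed together, Albumentations applies the same spatial transforms to both, so both come back at IMG_SIZE x IMG_SIZE.
_mask_path, _image_path = DATA_DIR + df.iloc[0]
_augmented = get_train_augs()(image=cv2.imread(_image_path), mask=cv2.imread(_mask_path))
print(_augmented['image'].shape, _augmented['mask'].shape)  # expected: (224, 224, 3) (224, 224, 3)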
class Human_Segmentation_set(Dataset):
def __init__(self , annotations_file , img_dir , transform = None , target_transform = None) :
self.img_labels = annotations_file
self.img_dir = img_dir
self.transform = transform
self.target_transform = target_transform
def __len__(self) :
return len(self.img_labels)
def __getitem__(self , idx) :
mask_path , image_path = self.img_dir + self.img_labels.iloc[idx]
# print(image.shape)
# io.imread does not reliably give us the color channels, so we read the files with cv2.imread() instead
image = cv2.imread(image_path)
mask = cv2.imread(mask_path)
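        # note: cv2.imread returns images in BGR channel order; no BGR -> RGB conversion is applied in this notebook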
if self.transform :
data = self.transform(image = image , mask = mask)
image = data['image']
mask = data['mask']
# print(image.shape)
image_tensor = torch.from_numpy(image).permute(2,0,1)/255.
mask_tensor = torch.from_numpy(mask).permute(2,0,1)/255.
image_tensor = image_tensor.to(DEVICE)
mask_tensor = mask_tensor.to(DEVICE)
return image_tensor , mask_tensor
def show_data(self , idx) :
mask_path , image_path = self.img_dir + self.img_labels.iloc[idx]
image = cv2.imread(image_path)
mask = cv2.imread(mask_path)
print(image.shape , mask.shape)
if self.transform :
data = self.transform(image = image , mask = mask)
image = data['image']
mask = data['mask']
f, (ax1, ax2) = plt.subplots(1, 2, figsize=(10,5))
ax1.set_title('IMAGE')
ax1.imshow(image)
ax2.set_title('GROUND TRUTH')
ax2.imshow(mask,cmap = 'gray')
train_set = Human_Segmentation_set(train_df , DATA_DIR , get_train_augs())
val_set = Human_Segmentation_set(val_df , DATA_DIR , get_val_augs())
train_set.show_data(9)
(640, 480, 3) (577, 433, 3)
print(f"Size of Trainset : {len(train_set)}")
print(f"Size of Validset : {len(val_set)}")
Size of Trainset : 232
Size of Validset : 58
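A quick shape check on one sample (my own sketch, not in the original run): __getitem__ returns float tensors of shape (3, IMG_SIZE, IMG_SIZE), already moved to DEVICE, with both image and mask scaled to [0, 1].
sample_image, sample_mask = train_set[0]
print(sample_image.shape, sample_image.dtype, sample_image.device)            # torch.Size([3, 224, 224]) torch.float32 cuda:0 (or cpu)
print(sample_mask.shape, sample_mask.min().item(), sample_mask.max().item())  # mask values lie in [0, 1]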
train_loader = DataLoader(train_set , batch_size=CFG.batch_size , shuffle = True )
val_loader = DataLoader(val_set , batch_size=CFG.batch_size , shuffle = False )
# for image , mask in train_loader :
# break
# print(image.shape , mask.shape)
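The commented-out loop above hints at a batch-level check; a live version (my own sketch): one batch should have shape (batch_size, 3, 224, 224) for both images and masks.
batch_images, batch_masks = next(iter(train_loader))
print(batch_images.shape, batch_masks.shape)  # torch.Size([16, 3, 224, 224]) torch.Size([16, 3, 224, 224])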
segmentation_models_pytorch documentation: https://smp.readthedocs.io/en/latest/
import segmentation_models_pytorch as smp
from segmentation_models_pytorch.losses import DiceLoss
class Human_seg_model(nn.Module):
def __init__(self):
super(Human_seg_model, self).__init__()
self.model = smp.Unet(
encoder_name=CFG.encoder,
encoder_weights=CFG.encoder_weights,
in_channels=3,
classes=3,
activation=None
).to(DEVICE)
def forward(self, images, masks=None):
logits = self.model(images)
# print(images.shape)
# print(logits.shape)
# print(masks.shape)
if masks is not None:
loss1 = DiceLoss(mode='binary')(logits, masks)
loss2 = nn.BCEWithLogitsLoss()(logits, masks)
return logits, loss1 + loss2
return logits
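A hedged smoke test of the forward pass (my own sketch, using random tensors instead of real data): with masks supplied the model returns logits of shape (batch, 3, 224, 224) together with the combined Dice + BCE loss; without masks it returns only the logits.
_seg = Human_seg_model().to(DEVICE)
_imgs = torch.rand(2, 3, IMG_SIZE, IMG_SIZE, device=DEVICE)
_msks = torch.randint(0, 2, (2, 3, IMG_SIZE, IMG_SIZE), device=DEVICE).float()
_logits, _loss = _seg(_imgs, _msks)
print(_logits.shape, _loss.item())  # torch.Size([2, 3, 224, 224]) and a scalar loss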
def train_fn(model , train_loader , optimizer) :
model.train()
total_loss = 0
for images , masks in tqdm(train_loader) :
optimizer.zero_grad()
images , masks = images.to(DEVICE) , masks.to(DEVICE)
logits , loss = model(images , masks)
loss.backward()
optimizer.step()
total_loss+=loss.item()
return total_loss/len(train_loader)
def val_fn(model, val_loader):
    model.eval()
    total_loss = 0
    with torch.inference_mode():
        for images, masks in tqdm(val_loader):
            logits = model(images)
            # validation tracks the Dice loss only (training optimizes Dice + BCE),
            # so train and val losses are not directly comparable
            loss = DiceLoss(mode='binary')(logits, masks)
            total_loss += loss.item()
    return total_loss / len(val_loader)
SegModel = Human_seg_model().to(DEVICE)
optimizer = torch.optim.Adam(SegModel.parameters())
for i in range(CFG.epochs):
    train_loss = train_fn(SegModel, train_loader, optimizer)
    val_loss = val_fn(SegModel, val_loader)
    print(f"EPOCH {i+1} : Train Loss : {train_loss} | Val LOSS : {val_loss}")
100%|██████████| 15/15 [00:03<00:00, 3.98it/s] 100%|██████████| 4/4 [00:00<00:00, 7.95it/s]
EPOCH 1 : Train Loss : 0.9997095545132955 | Val LOSS : 0.25976723432540894
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.41it/s]
EPOCH 2 : Train Loss : 0.5496830582618714 | Val LOSS : 0.20350058376789093
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.07it/s]
EPOCH 3 : Train Loss : 0.3670327146848043 | Val LOSS : 0.20375806093215942
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.55it/s]
EPOCH 4 : Train Loss : 0.2536358783642451 | Val LOSS : 0.2708975076675415
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.56it/s]
EPOCH 5 : Train Loss : 0.25740688145160673 | Val LOSS : 0.1315029114484787
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.60it/s]
EPOCH 6 : Train Loss : 0.22397208909193675 | Val LOSS : 0.12827639281749725
100%|██████████| 15/15 [00:03<00:00, 4.16it/s] 100%|██████████| 4/4 [00:00<00:00, 9.65it/s]
EPOCH 7 : Train Loss : 0.19757938583691914 | Val LOSS : 0.09063972532749176
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.59it/s]
EPOCH 8 : Train Loss : 0.17595191597938536 | Val LOSS : 0.18408235907554626
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 9 : Train Loss : 0.19512994786103566 | Val LOSS : 0.0898253470659256
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.62it/s]
EPOCH 10 : Train Loss : 0.17984063525994617 | Val LOSS : 0.08905310928821564
100%|██████████| 15/15 [00:03<00:00, 4.14it/s] 100%|██████████| 4/4 [00:00<00:00, 9.53it/s]
EPOCH 11 : Train Loss : 0.16808033287525176 | Val LOSS : 0.0758216381072998
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.49it/s]
EPOCH 12 : Train Loss : 0.18575524886449177 | Val LOSS : 0.07853583991527557
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.36it/s]
EPOCH 13 : Train Loss : 0.16434621860583623 | Val LOSS : 0.07850758731365204
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.58it/s]
EPOCH 14 : Train Loss : 0.14620108058055242 | Val LOSS : 0.07389053702354431
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 15 : Train Loss : 0.1326741561293602 | Val LOSS : 0.07808315753936768
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.58it/s]
EPOCH 16 : Train Loss : 0.14195274362961452 | Val LOSS : 0.07068488001823425
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.56it/s]
EPOCH 17 : Train Loss : 0.1352854112784068 | Val LOSS : 0.06913159787654877
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.56it/s]
EPOCH 18 : Train Loss : 0.12411548097928365 | Val LOSS : 0.09605342149734497
100%|██████████| 15/15 [00:03<00:00, 4.16it/s] 100%|██████████| 4/4 [00:00<00:00, 9.46it/s]
EPOCH 19 : Train Loss : 0.12370129575332006 | Val LOSS : 0.06515738368034363
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.52it/s]
EPOCH 20 : Train Loss : 0.1318427726626396 | Val LOSS : 0.07181771099567413
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.58it/s]
EPOCH 21 : Train Loss : 0.12094110548496247 | Val LOSS : 0.06769019365310669
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.55it/s]
EPOCH 22 : Train Loss : 0.12416018644968668 | Val LOSS : 0.06716792285442352
100%|██████████| 15/15 [00:03<00:00, 4.22it/s] 100%|██████████| 4/4 [00:00<00:00, 9.61it/s]
EPOCH 23 : Train Loss : 0.11271657248338064 | Val LOSS : 0.06794945895671844
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 24 : Train Loss : 0.11671340664227804 | Val LOSS : 0.06074543297290802
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.60it/s]
EPOCH 25 : Train Loss : 0.10691324174404145 | Val LOSS : 0.06174600124359131
100%|██████████| 15/15 [00:03<00:00, 4.22it/s] 100%|██████████| 4/4 [00:00<00:00, 9.63it/s]
EPOCH 26 : Train Loss : 0.09816926668087642 | Val LOSS : 0.06244935095310211
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.56it/s]
EPOCH 27 : Train Loss : 0.101382448275884 | Val LOSS : 0.05727836489677429
100%|██████████| 15/15 [00:03<00:00, 4.22it/s] 100%|██████████| 4/4 [00:00<00:00, 9.47it/s]
EPOCH 28 : Train Loss : 0.0921863983074824 | Val LOSS : 0.06097997725009918
100%|██████████| 15/15 [00:03<00:00, 4.21it/s] 100%|██████████| 4/4 [00:00<00:00, 9.57it/s]
EPOCH 29 : Train Loss : 0.10598641236623128 | Val LOSS : 0.05587342381477356
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 30 : Train Loss : 0.11117593646049499 | Val LOSS : 0.062125012278556824
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.55it/s]
EPOCH 31 : Train Loss : 0.11133797814448675 | Val LOSS : 0.06312143802642822
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.53it/s]
EPOCH 32 : Train Loss : 0.11581649233897527 | Val LOSS : 0.06398874521255493
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.23it/s]
EPOCH 33 : Train Loss : 0.10613083491722743 | Val LOSS : 0.059580639004707336
100%|██████████| 15/15 [00:03<00:00, 4.16it/s] 100%|██████████| 4/4 [00:00<00:00, 9.45it/s]
EPOCH 34 : Train Loss : 0.09884818941354752 | Val LOSS : 0.06303133070468903
100%|██████████| 15/15 [00:03<00:00, 4.10it/s] 100%|██████████| 4/4 [00:00<00:00, 9.49it/s]
EPOCH 35 : Train Loss : 0.09892554879188538 | Val LOSS : 0.059409573674201965
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.55it/s]
EPOCH 36 : Train Loss : 0.09265781591335932 | Val LOSS : 0.05542758107185364
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.56it/s]
EPOCH 37 : Train Loss : 0.08497662196556727 | Val LOSS : 0.06402604281902313
100%|██████████| 15/15 [00:03<00:00, 4.22it/s] 100%|██████████| 4/4 [00:00<00:00, 9.63it/s]
EPOCH 38 : Train Loss : 0.08401908030112584 | Val LOSS : 0.0568007230758667
100%|██████████| 15/15 [00:03<00:00, 4.22it/s] 100%|██████████| 4/4 [00:00<00:00, 9.59it/s]
EPOCH 39 : Train Loss : 0.07984224557876587 | Val LOSS : 0.057211726903915405
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.53it/s]
EPOCH 40 : Train Loss : 0.08237162629763285 | Val LOSS : 0.05572015047073364
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.48it/s]
EPOCH 41 : Train Loss : 0.08560297439495722 | Val LOSS : 0.057647645473480225
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.09it/s]
EPOCH 42 : Train Loss : 0.08808586299419403 | Val LOSS : 0.0607854425907135
100%|██████████| 15/15 [00:03<00:00, 4.16it/s] 100%|██████████| 4/4 [00:00<00:00, 9.56it/s]
EPOCH 43 : Train Loss : 0.08581433594226837 | Val LOSS : 0.05591282248497009
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 44 : Train Loss : 0.0905124639471372 | Val LOSS : 0.0667969286441803
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.52it/s]
EPOCH 45 : Train Loss : 0.08317660142978033 | Val LOSS : 0.061113402247428894
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 46 : Train Loss : 0.09023476392030716 | Val LOSS : 0.055951207876205444
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.47it/s]
EPOCH 47 : Train Loss : 0.10768239150444667 | Val LOSS : 0.07472625374794006
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.34it/s]
EPOCH 48 : Train Loss : 0.11004555424054464 | Val LOSS : 0.07334202527999878
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.64it/s]
EPOCH 49 : Train Loss : 0.10195254981517791 | Val LOSS : 0.06745968759059906
100%|██████████| 15/15 [00:03<00:00, 4.14it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 50 : Train Loss : 0.10390354891618094 | Val LOSS : 0.06496486067771912
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.35it/s]
EPOCH 51 : Train Loss : 0.1074952890475591 | Val LOSS : 0.07852084934711456
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.53it/s]
EPOCH 52 : Train Loss : 0.10971737504005433 | Val LOSS : 0.07351009547710419
100%|██████████| 15/15 [00:03<00:00, 4.21it/s] 100%|██████████| 4/4 [00:00<00:00, 9.59it/s]
EPOCH 53 : Train Loss : 0.1137406234939893 | Val LOSS : 0.07234345376491547
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.51it/s]
EPOCH 54 : Train Loss : 0.09817893654108048 | Val LOSS : 0.07198452949523926
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.49it/s]
EPOCH 55 : Train Loss : 0.08935619542996089 | Val LOSS : 0.06855733692646027
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.60it/s]
EPOCH 56 : Train Loss : 0.08404100686311722 | Val LOSS : 0.06919020414352417
100%|██████████| 15/15 [00:03<00:00, 4.04it/s] 100%|██████████| 4/4 [00:00<00:00, 9.38it/s]
EPOCH 57 : Train Loss : 0.07602214316527049 | Val LOSS : 0.0683085173368454
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.63it/s]
EPOCH 58 : Train Loss : 0.07709308390816053 | Val LOSS : 0.06485989689826965
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.39it/s]
EPOCH 59 : Train Loss : 0.08666286071141562 | Val LOSS : 0.07388283312320709
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.56it/s]
EPOCH 60 : Train Loss : 0.08885819440086683 | Val LOSS : 0.06310684978961945
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.50it/s]
EPOCH 61 : Train Loss : 0.09433518548806509 | Val LOSS : 0.05752560496330261
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.47it/s]
EPOCH 62 : Train Loss : 0.08056580672661463 | Val LOSS : 0.05832841992378235
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.54it/s]
EPOCH 63 : Train Loss : 0.07833703607320786 | Val LOSS : 0.05453668534755707
100%|██████████| 15/15 [00:03<00:00, 4.16it/s] 100%|██████████| 4/4 [00:00<00:00, 9.36it/s]
EPOCH 64 : Train Loss : 0.07197273795803388 | Val LOSS : 0.051689088344573975
100%|██████████| 15/15 [00:03<00:00, 4.11it/s] 100%|██████████| 4/4 [00:00<00:00, 9.03it/s]
EPOCH 65 : Train Loss : 0.07233829746643702 | Val LOSS : 0.0515751987695694
100%|██████████| 15/15 [00:03<00:00, 4.11it/s] 100%|██████████| 4/4 [00:00<00:00, 9.45it/s]
EPOCH 66 : Train Loss : 0.06661542430520058 | Val LOSS : 0.05085156857967377
100%|██████████| 15/15 [00:03<00:00, 4.11it/s] 100%|██████████| 4/4 [00:00<00:00, 9.24it/s]
EPOCH 67 : Train Loss : 0.07025771538416545 | Val LOSS : 0.04967103898525238
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.39it/s]
EPOCH 68 : Train Loss : 0.06453891371687254 | Val LOSS : 0.05089910328388214
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.04it/s]
EPOCH 69 : Train Loss : 0.06770160968104998 | Val LOSS : 0.050984352827072144
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.32it/s]
EPOCH 70 : Train Loss : 0.0727398989101251 | Val LOSS : 0.04907435178756714
100%|██████████| 15/15 [00:03<00:00, 4.14it/s] 100%|██████████| 4/4 [00:00<00:00, 9.38it/s]
EPOCH 71 : Train Loss : 0.07690125976999601 | Val LOSS : 0.06565482914447784
100%|██████████| 15/15 [00:03<00:00, 4.13it/s] 100%|██████████| 4/4 [00:00<00:00, 9.38it/s]
EPOCH 72 : Train Loss : 0.07762517084678014 | Val LOSS : 0.05416996777057648
100%|██████████| 15/15 [00:03<00:00, 4.12it/s] 100%|██████████| 4/4 [00:00<00:00, 8.76it/s]
EPOCH 73 : Train Loss : 0.0728624259432157 | Val LOSS : 0.051189079880714417
100%|██████████| 15/15 [00:03<00:00, 4.12it/s] 100%|██████████| 4/4 [00:00<00:00, 9.27it/s]
EPOCH 74 : Train Loss : 0.07399392947554588 | Val LOSS : 0.05486442148685455
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.40it/s]
EPOCH 75 : Train Loss : 0.07041777074337005 | Val LOSS : 0.0525328665971756
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.42it/s]
EPOCH 76 : Train Loss : 0.0683072509864966 | Val LOSS : 0.054178521037101746
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.40it/s]
EPOCH 77 : Train Loss : 0.0736974244316419 | Val LOSS : 0.05839262902736664
100%|██████████| 15/15 [00:03<00:00, 4.16it/s] 100%|██████████| 4/4 [00:00<00:00, 9.40it/s]
EPOCH 78 : Train Loss : 0.07369779050350189 | Val LOSS : 0.05916334688663483
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.28it/s]
EPOCH 79 : Train Loss : 0.06941532740990321 | Val LOSS : 0.05844599008560181
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.34it/s]
EPOCH 80 : Train Loss : 0.07925675660371781 | Val LOSS : 0.062327831983566284
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.04it/s]
EPOCH 81 : Train Loss : 0.0755162407954534 | Val LOSS : 0.05524091422557831
100%|██████████| 15/15 [00:03<00:00, 4.13it/s] 100%|██████████| 4/4 [00:00<00:00, 9.28it/s]
EPOCH 82 : Train Loss : 0.07414799109101296 | Val LOSS : 0.052666500210762024
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.36it/s]
EPOCH 83 : Train Loss : 0.06890688240528106 | Val LOSS : 0.05592821538448334
100%|██████████| 15/15 [00:03<00:00, 4.14it/s] 100%|██████████| 4/4 [00:00<00:00, 9.31it/s]
EPOCH 84 : Train Loss : 0.0661033496260643 | Val LOSS : 0.056860923767089844
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.32it/s]
EPOCH 85 : Train Loss : 0.07382206519444784 | Val LOSS : 0.06475362181663513
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.26it/s]
EPOCH 86 : Train Loss : 0.07538855175177256 | Val LOSS : 0.07206526398658752
100%|██████████| 15/15 [00:03<00:00, 4.12it/s] 100%|██████████| 4/4 [00:00<00:00, 9.17it/s]
EPOCH 87 : Train Loss : 0.10579567576448122 | Val LOSS : 0.09540912508964539
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 9.47it/s]
EPOCH 88 : Train Loss : 0.09216355035702388 | Val LOSS : 0.0720854252576828
100%|██████████| 15/15 [00:03<00:00, 4.13it/s] 100%|██████████| 4/4 [00:00<00:00, 9.45it/s]
EPOCH 89 : Train Loss : 0.08772892206907272 | Val LOSS : 0.06775330007076263
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.39it/s]
EPOCH 90 : Train Loss : 0.08669436027606328 | Val LOSS : 0.07414776086807251
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.46it/s]
EPOCH 91 : Train Loss : 0.0784996765355269 | Val LOSS : 0.07533371448516846
100%|██████████| 15/15 [00:03<00:00, 4.17it/s] 100%|██████████| 4/4 [00:00<00:00, 9.44it/s]
EPOCH 92 : Train Loss : 0.0826564242442449 | Val LOSS : 0.08051809668540955
100%|██████████| 15/15 [00:03<00:00, 4.19it/s] 100%|██████████| 4/4 [00:00<00:00, 9.51it/s]
EPOCH 93 : Train Loss : 0.07125108689069748 | Val LOSS : 0.07110950350761414
100%|██████████| 15/15 [00:03<00:00, 4.15it/s] 100%|██████████| 4/4 [00:00<00:00, 8.94it/s]
EPOCH 94 : Train Loss : 0.06506957908471425 | Val LOSS : 0.0675150454044342
100%|██████████| 15/15 [00:03<00:00, 4.12it/s] 100%|██████████| 4/4 [00:00<00:00, 9.30it/s]
EPOCH 95 : Train Loss : 0.06638172939419747 | Val LOSS : 0.07126311957836151
100%|██████████| 15/15 [00:03<00:00, 4.16it/s] 100%|██████████| 4/4 [00:00<00:00, 9.18it/s]
EPOCH 96 : Train Loss : 0.07201234151919683 | Val LOSS : 0.06252387166023254
100%|██████████| 15/15 [00:03<00:00, 4.09it/s] 100%|██████████| 4/4 [00:00<00:00, 9.43it/s]
EPOCH 97 : Train Loss : 0.06964358786741892 | Val LOSS : 0.0568305104970932
100%|██████████| 15/15 [00:03<00:00, 4.18it/s] 100%|██████████| 4/4 [00:00<00:00, 9.45it/s]
EPOCH 98 : Train Loss : 0.07116180310646693 | Val LOSS : 0.05228324234485626
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.48it/s]
EPOCH 99 : Train Loss : 0.06614651655157407 | Val LOSS : 0.050836846232414246
100%|██████████| 15/15 [00:03<00:00, 4.20it/s] 100%|██████████| 4/4 [00:00<00:00, 9.45it/s]
EPOCH 100 : Train Loss : 0.06907231460014979 | Val LOSS : 0.05339653789997101
val_set
<__main__.Human_Segmentation_set at 0x7e8326754670>
prediction.shape
(224, 224, 3)
def show_image(image, mask, pred_image=None):
    if pred_image is None:
        f, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 5))
        ax1.set_title('IMAGE')
        ax1.imshow(image, cmap='gray')
        ax2.set_title('GROUND TRUTH')
        ax2.imshow(mask, cmap='gray')
    else:
        f, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(10, 5))
        ax1.set_title('IMAGE')
        ax1.imshow(image, cmap='gray')
        ax2.set_title('GROUND TRUTH')
        ax2.imshow(mask, cmap='gray')
        ax3.set_title('MODEL OUTPUT')
        ax3.imshow(pred_image, cmap='gray')
with torch.inference_mode():
    for image, mask in tqdm(val_set):
        image = image.to(DEVICE)
        logits = SegModel(image.unsqueeze(0))
        prediction = logits.squeeze().permute(1, 2, 0).detach().cpu().numpy()
        mask = mask.permute(1, 2, 0).detach().cpu().numpy()
        image = image.permute(1, 2, 0).detach().cpu().numpy()
        show_image(image, mask, prediction)
34%|███▍ | 20/58 [00:01<00:03, 12.53it/s]/tmp/ipykernel_34/3939027367.py:15: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`). Consider using `matplotlib.pyplot.close()`. f, (ax1, ax2,ax3) = plt.subplots(1, 3, figsize=(10,5)) 100%|██████████| 58/58 [00:05<00:00, 10.31it/s]
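The loop above plots the raw logits. For a clean binary mask on one validation sample, a hedged post-processing sketch (my own addition; the 0.5 threshold is an assumption, not tuned on this dataset):
with torch.inference_mode():
    image, mask = val_set[0]
    logits = SegModel(image.unsqueeze(0))
    pred_mask = (torch.sigmoid(logits) > 0.5).float()  # probabilities -> hard 0/1 mask
    show_image(image.permute(1, 2, 0).cpu().numpy(),
               mask.permute(1, 2, 0).cpu().numpy(),
               pred_mask.squeeze().permute(1, 2, 0).cpu().numpy())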