IOGS - ARDF Projects - Dehazing

This file is a helper for loading the dehazing data

Imports

In [6]:
import numpy as np
import torch
import torch.utils.data
import os
import random

Define the dataset

The dataset is a PyTorch class. Dataset objects, used together with a dataloader, provide the data in a PyTorch-friendly format. The following class takes numpy arrays for the input data and the targets. It can also optionally apply a random crop to the data.

Note: This class does not perform data normalization; normalization must either be done before creating the dataset, or the definition of the class must be modified accordingly.

In [2]:
class ImageDataset(torch.utils.data.Dataset):
    """Dataset wrapping pre-loaded numpy arrays of inputs and targets.

    Each sample is returned as a pair of float32 torch tensors in
    channels-first (C, H, W) layout. Optionally, a random square crop of
    side ``imsize`` is taken from both the input and the target at the
    same location, so the pair stays spatially aligned.

    Note: no normalization is applied; normalize the arrays before
    building the dataset if needed.
    """

    def __init__(self, data, targets, crop=False, imsize=256):
        """Store the arrays and crop settings.

        Args:
            data: numpy array of input images, shape (N, H, W, C).
            targets: numpy array of target images, shape (N, H, W, C),
                aligned sample-by-sample with ``data``.
            crop: if True, return a random ``imsize`` x ``imsize`` crop
                of each sample instead of the full image.
            imsize: side length of the square crop (only used when
                ``crop`` is True).
        """
        self.data = data
        self.targets = targets
        self.crop = crop
        self.imsize = imsize

    def __getitem__(self, index):
        """Return the (input, target) pair at ``index`` as torch tensors.

        Raises:
            ValueError: if cropping is enabled and the image is smaller
                than ``imsize`` in either spatial dimension.
        """
        data, target = self.data[index], self.targets[index]

        if self.crop:
            # First two axes are the spatial dimensions (rows, cols).
            h, w, _ = data.shape
            if h < self.imsize or w < self.imsize:
                raise ValueError(
                    f"image of size ({h}, {w}) is smaller than the "
                    f"requested crop size {self.imsize}"
                )
            # Same random offsets for data and target keeps them aligned.
            y1 = random.randint(0, h - self.imsize)
            x1 = random.randint(0, w - self.imsize)
            data = data[y1:y1 + self.imsize, x1:x1 + self.imsize]
            target = target[y1:y1 + self.imsize, x1:x1 + self.imsize]

        # In torch, channels come first: (H, W, C) -> (C, H, W).
        data = data.transpose(2, 0, 1)
        target = target.transpose(2, 0, 1)

        # Convert to float32 (torch's default floating dtype).
        data = data.astype(np.float32)
        target = target.astype(np.float32)

        # Convert to torch tensors.
        data = torch.from_numpy(data)
        target = torch.from_numpy(target)

        return data, target

    def __len__(self):
        """Number of samples (first axis of the data array)."""
        return self.data.shape[0]

Download data

We provide the data in the form of numpy arrays, you can download them here:

Supposing they are stored on your Google Drive in the data/dehazing folder, you can mount the folder using the following code. Set USE_COLAB to True.

In [10]:
# Toggle between a Google Colab run (data on Drive) and a local run.
USE_COLAB = False

if USE_COLAB:
    # Mount Google Drive so the dataset stored there becomes reachable.
    from google.colab import drive
    drive.mount('/content/drive')
    data_dir = "/content/drive/My Drive/data/dehazing"
else:
    # Local run: the data folder is expected next to the notebook.
    data_dir = "data/dehazing"

Load data

In [11]:
# Load the training inputs and their ground-truth counterparts from disk.
train_data_path = os.path.join(data_dir, "train_data.npy")
train_gt_path = os.path.join(data_dir, "train_gt.npy")
data_th = np.load(train_data_path)
gt_th = np.load(train_gt_path)

Create the data loader and iterate

In [12]:
# Wrap the arrays in the dataset, then draw shuffled batches of two
# random 256x256 crops and print each batch's tensor sizes.
dataset = ImageDataset(data_th, gt_th, crop=True, imsize=256)
dataloader = torch.utils.data.DataLoader(dataset, batch_size=2, shuffle=True)

for batch in dataloader:
    inputs, targets = batch
    print(inputs.size(), targets.size())
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([2, 3, 256, 256]) torch.Size([2, 3, 256, 256])
torch.Size([1, 3, 256, 256]) torch.Size([1, 3, 256, 256])