qubvel / segmentation_models

Segmentation models with pretrained backbones. Keras and TensorFlow Keras.
MIT License

`validation_data` should be a tuple `(val_x, val_y, val_sample_weight)` or `(val_x, val_y)` #290

Open sunwoo76 opened 4 years ago

sunwoo76 commented 4 years ago

Dataset.py

```python
import os  # for accessing the file system
import random

import numpy as np
import tensorflow as tf
from skimage import io
from skimage.transform import resize
from tensorflow import keras

seed = 2323  # seed value for reproducible random values
random.seed(seed)
np.random.seed(seed)
tf.random.set_seed(seed)


class DataGenerator(keras.utils.Sequence):
    def __init__(self, ids, imgs_dir, masks_dir, batch_size=10, img_size=128,
                 n_classes=1, n_channels=3, shuffle=True):
        self.id_names = ids
        self.indexes = np.arange(len(self.id_names))
        self.imgs_dir = imgs_dir
        self.masks_dir = masks_dir
        self.batch_size = batch_size
        self.img_size = img_size
        self.n_classes = n_classes
        self.n_channels = n_channels
        self.shuffle = shuffle
        self.on_epoch_end()

    def on_epoch_end(self):
        'Updates indexes after each epoch'
        self.indexes = np.arange(len(self.id_names))
        if self.shuffle:
            np.random.shuffle(self.indexes)

    def __data_generation__(self, id_name):
        'Loads one (image, mask) pair'
        img_path = os.path.join(self.imgs_dir, id_name)    # polyp segmentation/images/id_name.jpg
        mask_path = os.path.join(self.masks_dir, id_name)  # polyp segmentation/masks/id_name.jpg

        image = io.imread(img_path)  # read the input image
        image = resize(image, (self.img_size, self.img_size), anti_aliasing=True)  # resize to img_size x img_size

        mask = io.imread(mask_path, as_gray=True)  # read the mask as a single-channel image
        mask = resize(mask, (self.img_size, self.img_size), anti_aliasing=True)  # resize mask to match the image
        mask = np.expand_dims(mask, axis=-1)

        # note: skimage's resize already rescales to floats in [0, 1],
        # so this extra division may be unintended
        image = image / 255.0
        mask = mask / 255.0

        return image, mask

    def __len__(self):
        "Denotes the number of batches per epoch"
        return int(np.floor(len(self.id_names) / self.batch_size))

    def __getitem__(self, index):  # index: batch number
        # Generate the ids of the batch
        # (note: this slices id_names directly, so the shuffled indexes are never used)
        batch_ids = self.id_names[index * self.batch_size:(index + 1) * self.batch_size]

        imgs = []
        masks = []

        for id_name in batch_ids:
            img, mask = self.__data_generation__(id_name)
            imgs.append(img)
            masks.append(mask)

        return np.array(imgs), np.array(masks)  # return one batch
```
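As a quick sanity check before training, the generator can be inspected batch by batch. A minimal sketch, assuming the dataset layout used in train.py below:

```python
import os

from dataset import DataGenerator

train_path = './data/train/imgs/'  # hypothetical layout, matching train.py
mask_path = './data/train/masks/'

gen = DataGenerator(os.listdir(train_path), train_path, mask_path,
                    batch_size=2, img_size=128)
imgs, masks = gen[0]            # first batch
print(imgs.shape, masks.shape)  # expected: (2, 128, 128, 3) (2, 128, 128, 1)
```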

train.py

```python
import os  # for accessing the file system

import segmentation_models as sm
import tensorflow.keras as keras
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau

from dataset import DataGenerator

if __name__ == '__main__':

    # hyperparameters
    image_size = 256
    train_path = './data/train/imgs/'  # address of the dataset
    mask_path = './data/train/masks/'
    epochs = 10     # number of epochs to train
    batch_size = 2  # training batch size

    # train ids
    train_ids = os.listdir(train_path)
    # validation data size
    val_data_size = 10  # number of images held out for validation

    valid_ids = train_ids[:val_data_size]  # image ids 0 to 9, used for validation
    train_ids = train_ids[val_data_size:]  # image ids used for training
    # print(valid_ids, "\n\n")
    print("training_size: ", len(train_ids), "validation_size: ", len(valid_ids))

    train_gen = DataGenerator(train_ids, train_path, mask_path, img_size=image_size, batch_size=batch_size)
    valid_gen = DataGenerator(valid_ids, train_path, mask_path, img_size=image_size, batch_size=batch_size)
    print("total training batches: ", len(train_gen))
    print("total validation batches: ", len(valid_gen))
    train_steps = len(train_ids) // batch_size
    valid_steps = len(valid_ids) // batch_size

    BACKBONE = 'resnet34'
    preprocess_input = sm.get_preprocessing(BACKBONE)  # note: obtained but never applied to the batches

    # define model
    model = sm.Unet(BACKBONE, classes=1, encoder_weights='imagenet')
    model.compile(
        'Adam',
        loss=sm.losses.bce_jaccard_loss,
        metrics=[sm.metrics.iou_score],
    )

    callbacks = [
        EarlyStopping(patience=10, verbose=1),
        ReduceLROnPlateau(factor=0.1, patience=3, min_lr=0.00001, verbose=1),
        ModelCheckpoint('model-tgs-salt.h5', verbose=1,
                        save_best_only=True, save_weights_only=True),
    ]

    # fit model
    model.fit_generator(generator=train_gen, validation_data=valid_gen,
                        steps_per_epoch=train_steps, validation_steps=valid_steps,
                        epochs=50, callbacks=callbacks)
```

Running this raises:

```
----> 2 model.fit_generator(generator=train_gen, validation_data=valid_gen, steps_per_epoch=train_steps, validation_steps=valid_steps, epochs=50)

ValueError: `validation_data` should be a tuple `(val_x, val_y, val_sample_weight)` or `(val_x, val_y)`.
```

Could you tell me why this error occurs?

sunwoo76 commented 4 years ago

I solved this problem. I think segmentation_models is implemented with keras, not tf.keras. The model's fit_generator only recognizes keras.utils.Sequence instances; a tf.keras.utils.Sequence fails that isinstance check, so it is treated as plain validation data and hits the tuple check that raises the ValueError above.

Error version (when using segmentation_models):

```python
import tensorflow as tf

class DataGenerator(tf.keras.utils.Sequence):
    ...
```

Revised version:

```python
import keras

class DataGenerator(keras.utils.Sequence):
    ...
```

and it worked. I hope this helps. Thanks!
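A related option, for anyone who wants to keep the tf.keras-based generator: newer releases of segmentation_models (1.x) let you select the framework explicitly. A minimal sketch, assuming segmentation_models >= 1.0:

```python
import os

# Must be set before segmentation_models is imported
os.environ['SM_FRAMEWORK'] = 'tf.keras'
import segmentation_models as sm

# Alternatively, after importing:
# sm.set_framework('tf.keras')
```

With tf.keras selected, the tf.keras DataGenerator works unchanged, and on TF 2.1+ the deprecated fit_generator can be replaced with `model.fit(train_gen, validation_data=valid_gen, ...)`, since fit accepts Sequence objects directly.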

ntelo007 commented 4 years ago

Can you tell me the exact versions of Keras, Keras-Applications, Keras-Preprocessing, and TensorFlow that you used to make this work?
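For comparing environments, the installed versions can be printed like this (a minimal sketch):

```python
import keras
import tensorflow as tf
import segmentation_models as sm

print('tensorflow:', tf.__version__)
print('keras:', keras.__version__)
print('segmentation_models:', sm.__version__)
```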

ramdhan1989 commented 4 years ago

I got the same issue.