Object Segmentation Data Generation Code
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
class CustomDataGen(tf.keras.utils.Sequence):
    """Keras Sequence yielding (image, mask) batches for segmentation training.

    For each file name, the input image is read from ``image/<filename>`` and
    the corresponding segmentation mask from ``mask/<filename>``; both are
    returned as float32 numpy arrays of shape (batch, H, W, 3).
    NOTE(review): assumes all images share the same dimensions — np.array()
    would otherwise build a ragged object array; confirm against the dataset.
    """

    def __init__(self, filenames, batch_size, shuffle=True):
        """
        Args:
            filenames: iterable of file names present in both image/ and mask/.
            batch_size: number of samples per batch.
            shuffle: if True, reshuffle the file order after every epoch.
        """
        # list() instead of .copy(): defensive copy that also accepts any
        # iterable (tuple, generator), not just a list.
        self.filenames = list(filenames)   # names of files
        self.batch_size = batch_size       # batch size
        self.shuffle = shuffle             # shuffle data if true
        self.n = len(self.filenames)       # number of files

    def on_epoch_end(self):
        # Called by Keras after each epoch; reshuffle so batches differ.
        if self.shuffle:
            np.random.shuffle(self.filenames)

    def __get_data(self, batches):
        """Load the images and masks for one batch of file names."""
        X_batch = []  # input images
        y_batch = []  # segmentation masks
        for filename in batches:
            image = cv2.imread("image/" + filename)  # load image
            mask = cv2.imread("mask/" + filename)    # load mask of image
            # cv2.imread returns None (no exception) on a missing or
            # unreadable file; fail loudly here instead of producing a
            # cryptic ragged-array error in np.array() below.
            if image is None:
                raise FileNotFoundError("cannot read image: image/" + filename)
            if mask is None:
                raise FileNotFoundError("cannot read mask: mask/" + filename)
            X_batch.append(image)
            y_batch.append(mask)
        return np.array(X_batch, dtype="float32"), np.array(y_batch, dtype="float32")

    def __getitem__(self, index):
        # File names belonging to batch `index`, then load them from disk.
        batches = self.filenames[index * self.batch_size:(index + 1) * self.batch_size]
        X, y = self.__get_data(batches)
        return X, y

    def __len__(self):
        # Number of full batches; remainder files (< batch_size) are dropped.
        return self.n // self.batch_size
|
1
2
3
4
5
6
# Hold out 30% of the samples for testing; the first 70% are for training.
test_split = 0.3
split_index = int(samples * (1 - test_split))
training_data = CustomDataGen(filenames[:split_index], batch_size)
test_data = CustomDataGen(filenames[split_index:samples], batch_size)
|
No comments:
Post a Comment