# This module provides utility classes supporting section 4 ("data storage")
# of part 2 of the medical image segmentation U-Net tutorial.
# Update 2019/5/2: HDF5DatasetWrite can now grow its storage dynamically.
# -*- coding: utf-8 -*-
import h5py
import os
import numpy as np
class HDF5DatasetGenerator:
def __init__(self, dbPath, batchSize, preprocessors=None,
             aug=None, binarize=True, classes=2):
    """Set up a batch generator over an HDF5 dataset.

    Parameters
    ----------
    dbPath : str
        Path to an HDF5 file that must contain an "images" dataset.
    batchSize : int
        Number of samples per yielded batch.
    preprocessors : list or None
        Optional per-image preprocessors (consumed by ``generator()`` —
        usage not visible here, confirm against that method).
    aug : object or None
        Optional data augmenter — presumably a Keras ImageDataGenerator;
        verify against ``generator()``.
    binarize : bool
        Presumably controls one-hot encoding of labels in ``generator()``.
    classes : int
        Number of classes used when binarizing labels.
    """
    self.batchSize = batchSize
    self.preprocessors = preprocessors
    self.aug = aug
    self.binarize = binarize
    self.classes = classes
    # Open read-only: calling h5py.File without a mode used the legacy
    # append default ('a'), which can create/modify the file and is
    # deprecated since h5py 2.9. A generator only ever reads.
    self.db = h5py.File(dbPath, "r")
    self.numImages = self.db["images"].shape[0]
    print("total images:", self.numImages)
    # Ceiling division: the final, possibly partial batch still counts.
    self.num_batches_per_epoch = int((self.numImages - 1) / batchSize) + 1
def generator(self, shuffle=True, passes=np.inf):
epochs = 0
while epochs < passes:
shuffle_indices = np.arange(self.numImages)
shuffle_indices = np.random.permutation