# 1. ImageDataGenerator (legacy keras.preprocessing image loader)
# Flow images from directories with on-the-fly augmentation, then train the
# model and predict on the (unshuffled, unlabeled) validation images.
train_datagen = ImageDataGenerator(rescale=1./255,
                                   zoom_range=0.20,
                                   fill_mode="nearest")
print(type(train_datagen)) # >>> <class 'keras.preprocessing.image.ImageDataGenerator'>
# Validation data gets rescaling only — never augment evaluation data.
validation_datagen = ImageDataGenerator(rescale=1./255)
train_generator = train_datagen.flow_from_directory(train_path,
                                                    target_size=(img_rows, img_cols),
                                                    batch_size=batch_size,
                                                    class_mode='categorical',
                                                    subset='training')
print(type(train_generator)) # >>> <class 'keras_preprocessing.image.directory_iterator.DirectoryIterator'>
validation_generator = validation_datagen.flow_from_directory(validation_path,
                                                              target_size=(img_rows, img_cols),
                                                              batch_size=batch_size,
                                                              class_mode=None, # only data, no labels
                                                              shuffle=False) # keep file order so predictions align with filenames
# NOTE: fit_generator/predict_generator were deprecated in TF 2.1 and removed
# in later releases; Model.fit/Model.predict accept generators directly.
history = model.fit(train_generator,
                    steps_per_epoch=len(train_generator),
                    epochs=epochs)
predictions = model.predict(validation_generator,
                            steps=len(validation_generator),
                            verbose=1)
# 2. image_dataset_from_directory (tf.data-based loader, TF 2.x)
# Build tf.data pipelines straight from directory trees, then train.
# image_dataset_from_directory defaults: shuffle=True, batch_size=32,
# image_size=(256, 256), label_mode='int'.
train_dataset = image_dataset_from_directory(
    train_dir,
    shuffle=True,          # default is already True; kept explicit
    batch_size=BATCH_SIZE, # default would be 32
    image_size=IMG_SIZE,   # default would be (256, 256)
)
validation_dataset = image_dataset_from_directory(
    validation_dir,
    label_mode='binary',
    shuffle=True,
    batch_size=BATCH_SIZE,
    image_size=IMG_SIZE,
)
print(type(validation_dataset)) # >>> <class 'tensorflow.python.data.ops.dataset_ops.BatchDataset'>
test_dataset = image_dataset_from_directory(
    test_dir,
    label_mode='binary',
    shuffle=True,
    image_size=IMG_SIZE,  # batch_size left at its default of 32
)
# dir(test_dataset) would list the dataset object's attributes and methods.
print(test_dataset.class_names) # class names inferred from the subdirectory names
history = model.fit(
    train_dataset,
    epochs=initial_epochs,
    validation_data=validation_dataset,
)