# tf.data + convolutional neural network: an end-to-end satellite-image
# classification example (binary classifier over the ./2_class dataset).
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
import pathlib
data_dir = './2_class'
data_root = pathlib.Path(data_dir)
# ./2_class is expected to contain one sub-directory per class, each holding
# the JPEG images for that class.
for item in data_root.iterdir():
    print(item)  # show the class sub-directories that were found

# Collect every image path (class_dir/image_file) as a plain string.
all_image_path = [str(path) for path in data_root.glob('*/*')]

import random
random.shuffle(all_image_path)  # shuffle once; the later skip/take split relies on this
image_count = len(all_image_path)

# Map class-directory names to integer labels (sorted for determinism).
label_names = sorted(item.name for item in data_root.glob('*/'))
label_to_index = {name: index for index, name in enumerate(label_names)}

# Each image's label is derived from its parent directory name.
all_image_label = [label_to_index[pathlib.Path(p).parent.name] for p in all_image_path]

import IPython.display as display  # kept: may be used elsewhere in the file
# Inverse mapping, useful for turning predictions back into class names.
index_to_label = {v: k for k, v in label_to_index.items()}
def load_preprosess_image(img_path):
    """Load one image file and preprocess it for the network.

    Args:
        img_path: scalar string tensor (or Python str) — path to a JPEG file.

    Returns:
        A float32 tensor of shape (256, 256, 3) with pixel values
        scaled to [0, 1].
    """
    img_raw = tf.io.read_file(img_path)
    # channels=3 forces RGB even for grayscale source files.
    img_tensor = tf.image.decode_jpeg(img_raw, channels=3)
    img_tensor = tf.image.resize(img_tensor, [256, 256])
    img_tensor = tf.cast(img_tensor, tf.float32)
    # Normalize from [0, 255] to [0, 1].
    return img_tensor / 255.0
# Build the (image, label) input pipeline.
path_ds = tf.data.Dataset.from_tensor_slices(all_image_path)
# Decode/resize images in parallel; AUTOTUNE lets tf.data pick the degree
# of parallelism (the serial default makes the input pipeline a bottleneck).
image_dataset = path_ds.map(load_preprosess_image,
                            num_parallel_calls=tf.data.experimental.AUTOTUNE)
label_dataset = tf.data.Dataset.from_tensor_slices(all_image_label)
dataset = tf.data.Dataset.zip((image_dataset, label_dataset))

# 80/20 train/test split. The path list was shuffled up-front, so a plain
# skip/take yields a random, non-overlapping split.
test_count = int(image_count * 0.2)
train_count = image_count - test_count
train_dataset = dataset.skip(test_count)
test_dataset = dataset.take(test_count)

BATCH_SIZE = 32
# repeat() so fit() can draw steps_per_epoch batches per epoch indefinitely;
# prefetch overlaps preprocessing with model execution.
train_dataset = (train_dataset
                 .repeat()
                 .shuffle(buffer_size=train_count)
                 .batch(BATCH_SIZE)
                 .prefetch(tf.data.experimental.AUTOTUNE))
test_dataset = test_dataset.batch(BATCH_SIZE)
# VGG-style stack: pairs of 3x3 convolutions with channel counts doubling
# block by block (64 -> 1024), downsampled by max-pooling, then global
# average pooling and dense layers ending in a single sigmoid unit for
# binary classification of 256x256 RGB images.
model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(64, (3, 3), input_shape=(256, 256, 3), activation='relu'),
    tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),
    tf.keras.layers.MaxPool2D(),
    tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
    tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),
    tf.keras.layers.MaxPool2D(),
    tf.keras.layers.Conv2D(256, (3, 3), activation='relu'),
    tf.keras.layers.Conv2D(256, (3, 3), activation='relu'),
    tf.keras.layers.MaxPool2D(),
    tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
    tf.keras.layers.MaxPool2D(),
    tf.keras.layers.Conv2D(512, (3, 3), activation='relu'),
    tf.keras.layers.MaxPool2D(),
    tf.keras.layers.Conv2D(1024, (3, 3), activation='relu'),
    tf.keras.layers.GlobalAveragePooling2D(),
    tf.keras.layers.Dense(1024, activation='relu'),
    tf.keras.layers.Dense(256, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid'),
])

# Single sigmoid output + 0/1 integer labels -> binary cross-entropy loss.
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['acc'])
# The training stream repeats indefinitely, so fit() must be told how many
# batches make up one epoch; the test set is finite but we cap it too.
steps_per_epoch = train_count // BATCH_SIZE
validation_steps = test_count // BATCH_SIZE

history = model.fit(
    train_dataset,
    epochs=3,
    steps_per_epoch=steps_per_epoch,
    validation_data=test_dataset,
    validation_steps=validation_steps,
)