kaggle(一)训练猫狗数据集

记录第一次使用kaggle训练猫狗数据集
(此处原文为插图,未能随文本导出)

# Inspect the raw Kaggle input directory before building our own layout.
# In a notebook the last expression's value (the file listing) is displayed.
import os
import shutil

os.listdir('../input/train/train')
# Build a local cat_dog/{train,test}/{cat,dog} directory tree that will hold
# the subset of images copied out of the read-only Kaggle input.
base_dir = './cat_dog'

train_dir = os.path.join(base_dir, 'train')
train_dir_dog = os.path.join(train_dir, 'dog')
train_dir_cat = os.path.join(train_dir, 'cat')

test_dir = os.path.join(base_dir, 'test')
test_dir_dog = os.path.join(test_dir, 'dog')
test_dir_cat = os.path.join(test_dir, 'cat')

# os.makedirs(..., exist_ok=True) creates missing parents and, unlike the
# original chain of bare os.mkdir calls, does not raise FileExistsError when
# the notebook cell is re-run.
for _d in (train_dir_dog, train_dir_cat, test_dir_dog, test_dir_cat):
    os.makedirs(_d, exist_ok=True)

def _copy_images(prefix, indices, dst_dir, src_root='../input/train/train'):
    """Copy ``<prefix>.<i>.jpg`` for each i in *indices* into *dst_dir*.

    prefix   -- 'cat' or 'dog'; both the source subfolder name and the
                filename stem (Kaggle names files cat.0.jpg, dog.0.jpg, ...).
    indices  -- iterable of integer image ids.
    dst_dir  -- destination directory (must already exist).
    src_root -- root of the raw Kaggle training images.
    """
    src_dir = os.path.join(src_root, prefix)
    for i in indices:
        fname = '{}.{}.jpg'.format(prefix, i)
        shutil.copyfile(os.path.join(src_dir, fname),
                        os.path.join(dst_dir, fname))


# First 1000 images of each class go to training, the next 500 to testing
# (2000 training / 1000 test images in total, matching the generator output).
_copy_images('cat', range(1000), train_dir_cat)
_copy_images('cat', range(1000, 1500), test_dir_cat)
_copy_images('dog', range(1000), train_dir_dog)
_copy_images('dog', range(1000, 1500), test_dir_dog)
接下来用 Keras 的 ImageDataGenerator 构建输入管道,它依次完成:
  • 读取图片文件
  • 将图片解码为像素数组
  • 预处理图片(统一缩放到目标尺寸)
  • 图片归一化(像素值缩放到 0–1 区间)
# Keras input pipeline: read images from disk, decode, resize to 200x200,
# scale pixel values into [0, 1], and yield (batch, label) pairs.
import keras
from keras import layers
from keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(rescale=1 / 255)
test_datagen = ImageDataGenerator(rescale=1 / 255)

# Labels are inferred from the cat/ and dog/ subdirectory names;
# class_mode='binary' yields scalar 0/1 targets for binary crossentropy.
train_generator = train_datagen.flow_from_directory(
    train_dir,
    target_size=(200, 200),
    batch_size=20,
    class_mode='binary',
)
Found 2000 images belonging to 2 classes.
# Validation/test pipeline. NOTE: the original called train_datagen here by
# mistake, leaving test_datagen unused; since both apply only rescale=1/255
# the output is unchanged — this fixes the inconsistency.
test_generator = test_datagen.flow_from_directory(
    test_dir,
    target_size=(200, 200),
    batch_size=20,
    class_mode='binary',
)
Found 1000 images belonging to 2 classes.
# Small VGG-style CNN: three conv-conv-pool-dropout stages, then a dense
# classifier ending in a single sigmoid unit for binary cat-vs-dog output.
model = keras.Sequential()
# Only the first layer needs input_shape; Keras infers it for every later
# layer. (The original repeated input_shape on the deeper Conv2D layers,
# where it is silently ignored and only misleads the reader.)
model.add(layers.Conv2D(64, (3, 3), activation='relu', input_shape=(200, 200, 3)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPool2D())
model.add(layers.Dropout(0.25))

model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPool2D())
model.add(layers.Dropout(0.25))

model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPool2D())
model.add(layers.Dropout(0.25))

model.add(layers.Flatten())
model.add(layers.Dense(236, activation='relu'))   # 236 units, as in the recorded run
model.add(layers.Dense(1, activation='sigmoid'))  # sigmoid -> probability of class 1


model.summary()
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_25 (Conv2D)           (None, 198, 198, 64)      1792      
_________________________________________________________________
conv2d_26 (Conv2D)           (None, 196, 196, 64)      36928     
_________________________________________________________________
max_pooling2d_13 (MaxPooling (None, 98, 98, 64)        0         
_________________________________________________________________
dropout_13 (Dropout)         (None, 98, 98, 64)        0         
_________________________________________________________________
conv2d_27 (Conv2D)           (None, 96, 96, 64)        36928     
_________________________________________________________________
conv2d_28 (Conv2D)           (None, 94, 94, 64)        36928     
_________________________________________________________________
max_pooling2d_14 (MaxPooling (None, 47, 47, 64)        0         
_________________________________________________________________
dropout_14 (Dropout)         (None, 47, 47, 64)        0         
_________________________________________________________________
conv2d_29 (Conv2D)           (None, 45, 45, 64)        36928     
_________________________________________________________________
conv2d_30 (Conv2D)           (None, 43, 43, 64)        36928     
_________________________________________________________________
max_pooling2d_15 (MaxPooling (None, 21, 21, 64)        0         
_________________________________________________________________
dropout_15 (Dropout)         (None, 21, 21, 64)        0         
_________________________________________________________________
flatten_5 (Flatten)          (None, 28224)             0         
_________________________________________________________________
dense_9 (Dense)              (None, 236)               6661100   
_________________________________________________________________
dense_10 (Dense)             (None, 1)                 237       
=================================================================
Total params: 6,847,769
Trainable params: 6,847,769
Non-trainable params: 0
_________________________________________________________________
# Binary classification setup: sigmoid output pairs with binary_crossentropy.
# NOTE(review): `lr=` and `fit_generator` are deprecated in newer Keras/TF
# (use `learning_rate=` and `model.fit`); kept as-is to match the Keras
# version this notebook ran under — confirm before upgrading.
model.compile(optimizer=keras.optimizers.Adam(lr=0.0001),
              loss='binary_crossentropy',metrics=['acc'])
# 100 steps/epoch * batch size 20 = 2000 training images per epoch;
# 50 validation steps * 20 = the full 1000 test images.
history = model.fit_generator(train_generator,
                              epochs=30,
                              steps_per_epoch=100,
                              validation_data=test_generator,
                              validation_steps=50
                            )
Epoch 1/30
100/100 [==============================] - 13s 131ms/step - loss: 0.6946 - acc: 0.4925 - val_loss: 0.6932 - val_acc: 0.5160
Epoch 2/30
100/100 [==============================] - 12s 116ms/step - loss: 0.6742 - acc: 0.5565 - val_loss: 0.6644 - val_acc: 0.6010
Epoch 3/30
100/100 [==============================] - 12s 116ms/step - loss: 0.6338 - acc: 0.6310 - val_loss: 0.6401 - val_acc: 0.6220
Epoch 4/30
100/100 [==============================] - 12s 116ms/step - loss: 0.6109 - acc: 0.6560 - val_loss: 0.6295 - val_acc: 0.6340
Epoch 5/30
100/100 [==============================] - 12s 116ms/step - loss: 0.5745 - acc: 0.6800 - val_loss: 0.6064 - val_acc: 0.6550
Epoch 6/30
100/100 [==============================] - 12s 116ms/step - loss: 0.5480 - acc: 0.7095 - val_loss: 0.5911 - val_acc: 0.6830
Epoch 7/30
100/100 [==============================] - 12s 116ms/step - loss: 0.5269 - acc: 0.7335 - val_loss: 0.5984 - val_acc: 0.6680
Epoch 8/30
100/100 [==============================] - 12s 116ms/step - loss: 0.5025 - acc: 0.7495 - val_loss: 0.5934 - val_acc: 0.6750
Epoch 9/30
100/100 [==============================] - 12s 116ms/step - loss: 0.4760 - acc: 0.7700 - val_loss: 0.6232 - val_acc: 0.6610
Epoch 10/30
100/100 [==============================] - 12s 116ms/step - loss: 0.4377 - acc: 0.7985 - val_loss: 0.6410 - val_acc: 0.6560
Epoch 11/30
100/100 [==============================] - 12s 117ms/step - loss: 0.4045 - acc: 0.8180 - val_loss: 0.6176 - val_acc: 0.6830
Epoch 12/30
100/100 [==============================] - 12s 116ms/step - loss: 0.3727 - acc: 0.8305 - val_loss: 0.6343 - val_acc: 0.6700
Epoch 13/30
100/100 [==============================] - 12s 116ms/step - loss: 0.3227 - acc: 0.8570 - val_loss: 0.6890 - val_acc: 0.6730
Epoch 14/30
100/100 [==============================] - 12s 117ms/step - loss: 0.2678 - acc: 0.8825 - val_loss: 0.7858 - val_acc: 0.6830
Epoch 15/30
100/100 [==============================] - 12s 116ms/step - loss: 0.2491 - acc: 0.8985 - val_loss: 0.7528 - val_acc: 0.6920
Epoch 16/30
100/100 [==============================] - 12s 116ms/step - loss: 0.1818 - acc: 0.9235 - val_loss: 0.8449 - val_acc: 0.6930
Epoch 17/30
100/100 [==============================] - 12s 117ms/step - loss: 0.1204 - acc: 0.9580 - val_loss: 0.9760 - val_acc: 0.6860
Epoch 18/30
100/100 [==============================] - 12s 118ms/step - loss: 0.0775 - acc: 0.9705 - val_loss: 1.1756 - val_acc: 0.6880
Epoch 19/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0548 - acc: 0.9865 - val_loss: 1.3155 - val_acc: 0.6920
Epoch 20/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0605 - acc: 0.9785 - val_loss: 1.6551 - val_acc: 0.6600
Epoch 21/30
100/100 [==============================] - 12s 117ms/step - loss: 0.0670 - acc: 0.9765 - val_loss: 1.2751 - val_acc: 0.6780
Epoch 22/30
100/100 [==============================] - 12s 117ms/step - loss: 0.0348 - acc: 0.9910 - val_loss: 1.4547 - val_acc: 0.6890
Epoch 23/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0511 - acc: 0.9825 - val_loss: 1.3448 - val_acc: 0.6840
Epoch 24/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0269 - acc: 0.9915 - val_loss: 1.5894 - val_acc: 0.6850
Epoch 25/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0147 - acc: 0.9980 - val_loss: 1.7083 - val_acc: 0.6590
Epoch 26/30
100/100 [==============================] - 12s 118ms/step - loss: 0.0097 - acc: 0.9980 - val_loss: 1.8089 - val_acc: 0.6830
Epoch 27/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0074 - acc: 0.9985 - val_loss: 2.1671 - val_acc: 0.6730
Epoch 28/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0068 - acc: 0.9995 - val_loss: 1.8426 - val_acc: 0.6950
Epoch 29/30
100/100 [==============================] - 12s 116ms/step - loss: 0.0024 - acc: 1.0000 - val_loss: 2.0147 - val_acc: 0.6840
Epoch 30/30
100/100 [==============================] - 12s 118ms/step - loss: 0.0463 - acc: 0.9855 - val_loss: 1.8073 - val_acc: 0.6670
import matplotlib.pyplot as plt
%matplotlib inline
plt.plot(history.epoch,history.history['loss'],label='loss')
plt.plot(history.epoch,history.history['val_loss'],label='val_loss')
plt.legend()

<matplotlib.legend.Legend at 0x7f1c9763a278>

(此处原文为损失曲线插图,未能随文本导出)

# Visualize training vs. validation accuracy over the epochs.
epochs = history.epoch
plt.plot(epochs, history.history['acc'], label='acc')
plt.plot(epochs, history.history['val_acc'], label='val_acc')
plt.legend()
<matplotlib.legend.Legend at 0x7f1c9759cb00>

(此处原文为准确率曲线插图,未能随文本导出)

从训练日志可以看到,大约第 8 个 epoch 之后验证损失开始持续上升,训练准确率接近 100% 而验证准确率停留在 0.69 左右,过拟合相当严重;但作为第一次完整跑通的 kaggle 训练流程,仍然值得记录。

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

小刘要努力。

顺便点一个赞

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值