# --- Training configuration ---
nb_epoch = 20       # total epochs for this run
batch_size = 256    # samples per gradient update
filepath = 'weights/mymodel_P1_1024.h5'  # best-checkpoint destination

# Build and compile the CLDNN-style classifier.
# NOTE(review): assumes `cldnn` and `classes` are defined earlier in the file — confirm.
model = cldnn.CLDNNLikeModel(None, input_shape=[256, 2], classes=len(classes))
model.compile(
    loss='categorical_crossentropy',
    metrics=['accuracy'],
    optimizer='adam',
)
model.summary()
def scheduler(epoch):
    """Print the optimizer's current learning rate and return it unchanged.

    Used with ``LearningRateScheduler`` purely as a per-epoch lr logger:
    returning the existing value means the schedule never alters the lr.
    """
    current_lr = K.get_value(model.optimizer.lr)
    print("epoch({}) lr is {}".format(epoch, current_lr))
    return current_lr


# Wrap the logger as a Keras callback so fit() calls it at each epoch start.
reduce_lr = LearningRateScheduler(scheduler)
history = model.fit(X_train,
Y_train,
batch_size=batch_size,
epochs=nb_epoch,
verbose=1,
validation_data=(X_test, Y_test),
callbacks = [reduce_lr,
keras.callbacks.ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='auto'),
keras.callbacks.ReduceLROnPlateau(monit
# NOTE(review): the lines below are scraped blog-page residue, not program code.
# "Using Keras to train a model / ONNX (C#)" — latest recommended article published 2024-06-13 15:02:02.