1、理论原理
超参数搜索:
https://blog.csdn.net/caoyuan666/article/details/105933836
2、代码
这里手动实现网格搜索,其实就是提前设定几个候选参数,通过 for 循环来逐一训练并比较。
# Manual grid search over SGD learning rates.
# Candidates span 1e-4 .. 3e-2 on a rough logarithmic scale;
# the update rule being tuned is: w = w + grad * learning_rate
histories = []
learning_rate = [1e-4, 3e-4, 1e-3, 3e-3, 1e-2, 3e-2]
for lr in learning_rate:
    # Rebuild the model from scratch for each candidate so every run
    # starts from fresh weights and results are comparable.
    model = keras.models.Sequential([
        keras.layers.Dense(30, activation='relu',
                           input_shape=x_train.shape[1:]),
        keras.layers.Dense(1),  # single regression output
    ])
    # Pass the rate by keyword: the legacy `lr` argument is deprecated
    # in modern Keras in favor of `learning_rate`.
    optimizer = keras.optimizers.SGD(learning_rate=lr)
    model.summary()
    model.compile(loss='mean_squared_error',
                  optimizer=optimizer)
    # Stop a run early once validation loss improves by less than
    # 1e-2 for 5 consecutive epochs.
    callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
    history = model.fit(x_train_scaled, y_train,
                        epochs=100,
                        validation_data=(x_valid_scaled, y_valid),
                        callbacks=callbacks)
    # Keep one History object per learning rate for later comparison.
    histories.append(history)