TensorFlow2.0从入门到进阶——第二章问题总结:wide_deep模型、超参数搜索

看的视频——https://www.bilibili.com/video/av79196096?p=23

wide_deep模型

1、函数式API实现

单输入单输出

input既是wide模型的输入,又是deep模型的输入;输出仅有一个output

# Wide & Deep model via the functional API — single input, single output.
# The same input feeds both the wide path (directly) and the deep path
# (through two hidden layers); the two paths are concatenated at the end.
# NOTE: renamed `input` -> `input_layer` so the builtin input() is not shadowed.
input_layer = keras.layers.Input(shape=x_train.shape[1:])
# Functional API: a layer object is callable like a function.
hidden1 = keras.layers.Dense(30, activation='relu')(input_layer)
hidden2 = keras.layers.Dense(30, activation='relu')(hidden1)
# Wide part: the raw input; deep part: hidden2's output. Concatenate them.
concat = keras.layers.concatenate([input_layer, hidden2])
output = keras.layers.Dense(1)(concat)
# Freeze the graph into a Model by declaring its inputs and outputs.
model = keras.models.Model(inputs=[input_layer], outputs=[output])

model.summary()
model.compile(loss="mean_squared_error", optimizer="adam")

# Stop early when validation loss improves by less than 1e-2 for 5 epochs.
callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
history = model.fit(x_train_scaled, y_train,
                    validation_data=(x_valid_scaled, y_valid),
                    epochs=100, callbacks=callbacks)

2、子类API实现

单输入单输出

# Wide & Deep model implemented with the Keras subclassing API.
class WideDeepModel(keras.models.Model):
    """Wide & Deep regressor: raw input (wide) concatenated with a
    two-hidden-layer deep path, followed by a single-unit Dense head."""

    def __init__(self):
        """Declare the layers; the wiring happens in call()."""
        super(WideDeepModel, self).__init__()
        self.hidden1_layer = keras.layers.Dense(30, activation='relu')
        self.hidden2_layer = keras.layers.Dense(30, activation='relu')
        self.output_layer = keras.layers.Dense(1)

    def call(self, input):
        """Forward pass (overridden): run the deep path, then concatenate
        its output with the raw input before the final Dense layer."""
        deep = self.hidden2_layer(self.hidden1_layer(input))
        merged = keras.layers.concatenate([input, deep])
        return self.output_layer(merged)
# Alternative to `model = WideDeepModel()`: wrap the subclassed model in a
# Sequential and build it explicitly so summary() knows the input shape.
model = keras.models.Sequential([WideDeepModel()])
model.build(input_shape=(None, 8))  # 8 input features per sample

model.summary()
model.compile(loss="mean_squared_error", optimizer="adam")

callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
history = model.fit(x_train_scaled, y_train,
                    validation_data=(x_valid_scaled, y_valid),
                    epochs=100, callbacks=callbacks)

3、多输入单输出

# Functional-API Wide & Deep with two inputs: the first 5 features feed the
# wide path; the last 6 features (columns 2:, overlapping the wide ones)
# feed the deep path.
input_wide = keras.layers.Input(shape=[5])
input_deep = keras.layers.Input(shape=[6])
hidden1 = keras.layers.Dense(30, activation='relu')(input_deep)
hidden2 = keras.layers.Dense(30, activation='relu')(hidden1)
concat = keras.layers.concatenate([input_wide, hidden2])
output = keras.layers.Dense(1)(concat)
model = keras.models.Model(inputs=[input_wide, input_deep],
                           outputs=[output])

model.summary()
model.compile(loss="mean_squared_error", optimizer="adam")

callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
# Split each feature matrix: wide = first 5 columns, deep = last 6 columns.
x_train_scaled_wide = x_train_scaled[:, :5]
x_train_scaled_deep = x_train_scaled[:, 2:]
x_valid_scaled_wide = x_valid_scaled[:, :5]
x_valid_scaled_deep = x_valid_scaled[:, 2:]
x_test_scaled_wide = x_test_scaled[:, :5]
x_test_scaled_deep = x_test_scaled[:, 2:]
history = model.fit([x_train_scaled_wide, x_train_scaled_deep], y_train,
                    validation_data=([x_valid_scaled_wide, x_valid_scaled_deep],
                                     y_valid),
                    epochs=100, callbacks=callbacks)

4、多输入多输出

    
# Multi-input AND multi-output variant: a second head (`output2`) is attached
# directly to the deep path, so the model returns two predictions; both are
# trained against the same target here.
input_wide = keras.layers.Input(shape=[5])
input_deep = keras.layers.Input(shape=[6])
hidden1 = keras.layers.Dense(30, activation='relu')(input_deep)
hidden2 = keras.layers.Dense(30, activation='relu')(hidden1)
concat = keras.layers.concatenate([input_wide, hidden2])
output = keras.layers.Dense(1)(concat)
output2 = keras.layers.Dense(1)(hidden2)
model = keras.models.Model(inputs=[input_wide, input_deep],
                           outputs=[output, output2])

model.summary()
model.compile(loss="mean_squared_error", optimizer="adam")

callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
# Wide path sees the first 5 features; deep path sees the last 6.
x_train_scaled_wide = x_train_scaled[:, :5]
x_train_scaled_deep = x_train_scaled[:, 2:]
x_valid_scaled_wide = x_valid_scaled[:, :5]
x_valid_scaled_deep = x_valid_scaled[:, 2:]
x_test_scaled_wide = x_test_scaled[:, :5]
x_test_scaled_deep = x_test_scaled[:, 2:]
history = model.fit([x_train_scaled_wide, x_train_scaled_deep],
                    [y_train, y_train],
                    validation_data=([x_valid_scaled_wide, x_valid_scaled_deep],
                                     [y_valid, y_valid]),
                    epochs=100, callbacks=callbacks)

2、超参数搜索

1、手动实现超参数搜索

根据图像选取较好的超参数

# Manual search over the learning rate: train one fresh model per candidate
# value and keep each History for later comparison.
# SGD update rule: W <- W - learning_rate * grad
learning_rate = [1e-4, 3e-4, 1e-3, 3e-3, 1e-2, 3e-2]
histories = []
for lr in learning_rate:
    # A new model must be built for every candidate learning rate.
    model = keras.models.Sequential([
        keras.layers.Dense(30, activation='relu',
                           input_shape=x_train.shape[1:]),
        keras.layers.Dense(1),
    ])
    model.compile(loss="mean_squared_error",
                  optimizer=keras.optimizers.SGD(lr))
    callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
    histories.append(model.fit(x_train_scaled, y_train,
                               validation_data=(x_valid_scaled, y_valid),
                               epochs=100, callbacks=callbacks))

def plot_learning_curves(history, figsize=(8, 5), ylim=(0, 1)):
    """Plot the per-epoch metric curves recorded in a Keras History.

    Generalized: the previously hard-coded figure size and y-axis limits
    are now keyword parameters whose defaults reproduce the old behavior.

    Args:
        history: object with a ``.history`` dict mapping metric name ->
            list of per-epoch values (as returned by ``model.fit``).
        figsize: figure size forwarded to pandas' ``DataFrame.plot``.
        ylim: ``(bottom, top)`` y-axis limits for the plot.
    """
    pd.DataFrame(history.history).plot(figsize=figsize)
    plt.grid(True)
    plt.gca().set_ylim(*ylim)
    plt.show()

# One learning-curve plot per tried learning rate, labelled on stdout.
for lr, history in zip(learning_rate, histories):
    print("learning rate:", lr)
    plot_learning_curves(history)

2、利用sklearn库实现自动超参数搜索

RandomizedSearchCV实现超参数搜索 分为三步:
1、将tf.keras.model转化为sklearn的model,利用tf.keras.wrappers.scikit_learn.KerasRegressor

def build_model(hidden_layers=1, layer_size=30, learning_rate=1e-3):
    """Build and compile an MLP regressor (MSE loss, SGD optimizer).

    Args:
        hidden_layers: total number of hidden Dense layers.
        layer_size: number of units in each hidden layer.
        learning_rate: SGD learning rate.
    """
    model = keras.models.Sequential()
    # First hidden layer also fixes the input shape.
    model.add(keras.layers.Dense(layer_size, activation='relu',
                                 input_shape=x_train.shape[1:]))
    for _ in range(hidden_layers - 1):
        model.add(keras.layers.Dense(layer_size, activation='relu'))
    model.add(keras.layers.Dense(1))
    # "mean_squared_error" is the same loss as the shorthand "mse".
    model.compile(loss="mean_squared_error",
                  optimizer=keras.optimizers.SGD(learning_rate))
    return model
# Step 1: wrap the Keras model builder as a scikit-learn regressor so it
# can be used with sklearn's search utilities.
sklearn_model = keras.wrappers.scikit_learn.KerasRegressor(build_model)
callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
history = sklearn_model.fit(x_train_scaled, y_train,
                            epochs=100,
                            validation_data=(x_valid_scaled, y_valid),
                            callbacks=callbacks)


2、定义参数集合

以上完成了第一步
下面开始第二步,进行参数集合的定义
要搜索的参数为hidden_layers = 1、layer_size =30、learning_rate=1e-3

from scipy.stats import reciprocal

# reciprocal pdf: f(x) = 1 / (x * log(b/a)) for a <= x <= b — a log-uniform
# distribution, well suited for sampling learning rates across magnitudes.
param_distribution = {
    "hidden_layers": [1, 2, 3, 4],
    "layer_size": np.arange(1, 100),
    # learning_rate is drawn continuously from the interval [1e-4, 1e-2].
    "learning_rate": reciprocal(1e-4, 1e-2),
}


3、使用RandomizedSearchCV实现超参数搜索

from sklearn.model_selection import RandomizedSearchCV

# Step 3: random search draws n_iter parameter sets from param_distribution.
# Internally it uses k-fold cross-validation: the training set is split into
# k folds, k-1 folds are used for training and 1 for validation (change k
# via cv=). After the search the best configuration is refit on all of the
# training data.
random_search_cv = RandomizedSearchCV(sklearn_model,
                                      param_distribution,
                                      n_iter=10,  # number of sampled parameter sets
                                      # cv=3,     # number of CV folds
                                      n_jobs=1)   # parallel workers
random_search_cv.fit(x_train_scaled, y_train, epochs=100,
                     validation_data=(x_valid_scaled, y_valid),
                     callbacks=callbacks)

最后,对得到的模型进行评估

# Inspect the search results, then evaluate the best model on the test set.
print(random_search_cv.best_params_)     # best hyper-parameters found
print(random_search_cv.best_score_)      # best cross-validated score
print(random_search_cv.best_estimator_)  # best fitted sklearn estimator

model = random_search_cv.best_estimator_.model  # underlying Keras model
model.evaluate(x_test_scaled, y_test)

 

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值