Two ways to implement the Wide & Deep model

1. Implementing the Wide & Deep model with the subclassing API

Here the Wide input and the Deep input are the same.

import tensorflow as tf
import matplotlib as mpl
import matplotlib.pyplot as plt

import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
from tensorflow import keras

# Load the California housing dataset
from sklearn.datasets import fetch_california_housing

housing = fetch_california_housing()
print(housing.DESCR)# 20640 samples in total, each with 8 features: MedInc, HouseAge, AveRooms, AveBedrms, Population, AveOccup, Latitude, Longitude
print(housing.data.shape)# equivalent to x_data: shape (20640, 8)
print(housing.target.shape)# equivalent to y_data: shape (20640,)

import pprint
pprint.pprint(housing.data[0:5])# 5 rows, 8 columns
pprint.pprint(housing.target[0:5])# 5 target values

from sklearn.model_selection import train_test_split
x_train_all,x_test,y_train_all,y_test = train_test_split(
    housing.data,housing.target,random_state=7)# default 3:1 split into train_all and test; pass test_size=0.25 to change the ratio
x_train,x_valid,y_train,y_valid = train_test_split(
    x_train_all,y_train_all,random_state=11)# split train_all into training and validation sets

# Check the dataset shapes
print(x_train.shape,y_train.shape)
print(x_valid.shape,y_valid.shape)
print(x_test.shape,y_test.shape)

# Standardize the features
from sklearn.preprocessing import StandardScaler

scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)# learn mean and std on the training set
x_valid_scaled = scaler.transform(x_valid)# reuse the training-set statistics
x_test_scaled = scaler.transform(x_test)
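# StandardScaler keeps the statistics learned from the training set in
# scaler.mean_ and scaler.scale_, so transform() is equivalent to
# (x - scaler.mean_) / scaler.scale_ . A quick check (illustrative only):
assert np.allclose(x_valid_scaled, (x_valid - scaler.mean_) / scaler.scale_)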

# Functional API version (kept here for comparison, commented out)
'''input = keras.layers.Input(shape=x_train.shape[1:])
hidden1 = keras.layers.Dense(30, activation='relu')(input)
hidden2 = keras.layers.Dense(30, activation='relu')(hidden1)# function composition: f(x) = h(g(x))
concat = keras.layers.concatenate([input, hidden2])
output = keras.layers.Dense(1)(concat)
model = keras.models.Model(inputs = [input],outputs = [output])
'''
class WideDeepModel(keras.models.Model):
    def __init__(self):
        super(WideDeepModel,self).__init__()
        '''Define the model's layers'''
        self.hidden1_layer = keras.layers.Dense(30,activation="relu")
        self.hidden2_layer = keras.layers.Dense(30,activation="relu")
        self.output_layer = keras.layers.Dense(1)# fully connected output layer
    def call(self,input):
        '''Forward pass of the model'''
        hidden1 = self.hidden1_layer(input)
        hidden2 = self.hidden2_layer(hidden1)
        concat = keras.layers.concatenate([input,hidden2])
        output = self.output_layer(concat)
        return output
# Two ways to build the model; the alternative is model = WideDeepModel()
model = keras.models.Sequential([
    WideDeepModel(),
])
model.build(input_shape = (None,8))

model.summary()
model.compile(loss="mean_squared_error", optimizer="adam")
callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
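# EarlyStopping (monitoring val_loss by default) stops training once the validation
# loss has failed to improve by at least min_delta for `patience` consecutive epochs;
# restore_best_weights=True could optionally be added to roll back to the best epoch.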

history = model.fit(x_train_scaled, y_train,
                    validation_data = (x_valid_scaled,y_valid),
                    epochs = 100,
                    callbacks=callbacks)

def plot_learning_curves(history):
    pd.DataFrame(history.history).plot(figsize = (8,5))
    plt.grid(True)
    plt.gca().set_ylim(0,1)
    plt.show()
plot_learning_curves(history)

model.evaluate(x_test_scaled,y_test)
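
As the comment above notes, the subclass model can also be used directly instead of being wrapped in a Sequential; a minimal sketch (reusing the WideDeepModel class defined above):

model = WideDeepModel()
model.build(input_shape = (None,8))# build() needs the input shape before summary() can print the layers
model.summary()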

Results:
(model summary and learning-curve plot omitted)
The results are close to those of the previous implementations.

2. Multi-input implementation (the Wide and Deep inputs are different)

import tensorflow as tf
import matplotlib as mpl
import matplotlib.pyplot as plt

import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
from tensorflow import keras

# Load the California housing dataset
from sklearn.datasets import fetch_california_housing

housing = fetch_california_housing()
print(housing.DESCR)# 20640 samples in total, each with 8 features: MedInc, HouseAge, AveRooms, AveBedrms, Population, AveOccup, Latitude, Longitude
print(housing.data.shape)# equivalent to x_data: shape (20640, 8)
print(housing.target.shape)# equivalent to y_data: shape (20640,)

import pprint
pprint.pprint(housing.data[0:5])# 5 rows, 8 columns
pprint.pprint(housing.target[0:5])# 5 target values

from sklearn.model_selection import train_test_split
x_train_all,x_test,y_train_all,y_test = train_test_split(
    housing.data,housing.target,random_state=7)# default 3:1 split into train_all and test; pass test_size=0.25 to change the ratio
x_train,x_valid,y_train,y_valid = train_test_split(
    x_train_all,y_train_all,random_state=11)# split train_all into training and validation sets

# Check the dataset shapes
print(x_train.shape,y_train.shape)
print(x_valid.shape,y_valid.shape)
print(x_test.shape,y_test.shape)

# Standardize the features
from sklearn.preprocessing import StandardScaler

scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)# learn mean and std on the training set
x_valid_scaled = scaler.transform(x_valid)# reuse the training-set statistics
x_test_scaled = scaler.transform(x_test)

# Define the model: multi-input functional API
input_wide = keras.layers.Input(shape=[5])
input_deep = keras.layers.Input(shape=[6])
hidden1 = keras.layers.Dense(30,activation="relu")(input_deep)
hidden2 = keras.layers.Dense(30,activation="relu")(hidden1)
concat = keras.layers.concatenate([input_wide,hidden2])
output = keras.layers.Dense(1)(concat)
model = keras.models.Model(inputs = [input_wide,input_deep],
                          outputs = [output])

model.summary()
model.compile(loss="mean_squared_error", optimizer="adam")
callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]

# Split the features into the wide part and the deep part
x_train_scaled_wide = x_train_scaled[:,:5]# first 5 features (columns 0-4)
x_train_scaled_deep = x_train_scaled[:,2:]# last 6 features (columns 2-7)
x_valid_scaled_wide = x_valid_scaled[:,:5]
x_valid_scaled_deep = x_valid_scaled[:,2:]
x_test_scaled_wide = x_test_scaled[:,:5]
x_test_scaled_deep = x_test_scaled[:,2:]
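# Note: the wide slice (columns 0-4) and the deep slice (columns 2-7) overlap on
# columns 2-4, so those three features feed both branches. Quick shape check (illustrative):
print(x_train_scaled_wide.shape, x_train_scaled_deep.shape)# expected: (11610, 5) (11610, 6)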

history = model.fit([x_train_scaled_wide,x_train_scaled_deep], y_train,
                    validation_data = ([x_valid_scaled_wide,x_valid_scaled_deep],y_valid),
                    epochs = 100,
                    callbacks=callbacks)

def plot_learning_curves(history):
    pd.DataFrame(history.history).plot(figsize = (8,5))
    plt.grid(True)
    plt.gca().set_ylim(0,1)
    plt.show()
plot_learning_curves(history)

model.evaluate([x_test_scaled_wide,x_test_scaled_deep],y_test)
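
# Inference with a multi-input model also takes one array per input.
# A minimal usage sketch (illustrative; predicts the first 3 test samples):
y_pred = model.predict([x_test_scaled_wide[:3], x_test_scaled_deep[:3]])
print(y_pred)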

(learning-curve plot and test evaluation output omitted)

3. A network with multiple outputs

import tensorflow as tf
import matplotlib as mpl
import matplotlib.pyplot as plt

import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
from tensorflow import keras

# Load the California housing dataset
from sklearn.datasets import fetch_california_housing

housing = fetch_california_housing()
print(housing.DESCR)# 20640 samples in total, each with 8 features: MedInc, HouseAge, AveRooms, AveBedrms, Population, AveOccup, Latitude, Longitude
print(housing.data.shape)# equivalent to x_data: shape (20640, 8)
print(housing.target.shape)# equivalent to y_data: shape (20640,)

import pprint
pprint.pprint(housing.data[0:5])# 5 rows, 8 columns
pprint.pprint(housing.target[0:5])# 5 target values

from sklearn.model_selection import train_test_split
x_train_all,x_test,y_train_all,y_test = train_test_split(
    housing.data,housing.target,random_state=7)# default 3:1 split into train_all and test; pass test_size=0.25 to change the ratio
x_train,x_valid,y_train,y_valid = train_test_split(
    x_train_all,y_train_all,random_state=11)# split train_all into training and validation sets

# Check the dataset shapes
print(x_train.shape,y_train.shape)
print(x_valid.shape,y_valid.shape)
print(x_test.shape,y_test.shape)

# Standardize the features
from sklearn.preprocessing import StandardScaler

scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)# learn mean and std on the training set
x_valid_scaled = scaler.transform(x_valid)# reuse the training-set statistics
x_test_scaled = scaler.transform(x_test)

# Define the model: multiple inputs and multiple outputs
input_wide = keras.layers.Input(shape=[5])
input_deep = keras.layers.Input(shape=[6])
hidden1 = keras.layers.Dense(30,activation="relu")(input_deep)
hidden2 = keras.layers.Dense(30,activation="relu")(hidden1)
concat = keras.layers.concatenate([input_wide,hidden2])
# two outputs: one from the wide+deep concatenation, one directly from the deep branch
output = keras.layers.Dense(1)(concat)
output2 = keras.layers.Dense(1)(hidden2)

model = keras.models.Model(inputs = [input_wide,input_deep],
                          outputs = [output,output2])

model.summary()
model.compile(loss="mean_squared_error", optimizer="adam")
callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]

# Split the features into the wide part and the deep part
x_train_scaled_wide = x_train_scaled[:,:5]# first 5 features (columns 0-4)
x_train_scaled_deep = x_train_scaled[:,2:]# last 6 features (columns 2-7)
x_valid_scaled_wide = x_valid_scaled[:,:5]
x_valid_scaled_deep = x_valid_scaled[:,2:]
x_test_scaled_wide = x_test_scaled[:,:5]
x_test_scaled_deep = x_test_scaled[:,2:]

history = model.fit([x_train_scaled_wide,x_train_scaled_deep], [y_train,y_train],
                    validation_data = ([x_valid_scaled_wide,x_valid_scaled_deep],
                                       [y_valid,y_valid]),
                    epochs = 100,
                    callbacks=callbacks)

def plot_learning_curves(history):
    pd.DataFrame(history.history).plot(figsize = (8,5))
    plt.grid(True)
    plt.gca().set_ylim(0,1)
    plt.show()
plot_learning_curves(history)

model.evaluate([x_test_scaled_wide,x_test_scaled_deep],[y_test,y_test])

Results:
(learning-curve plot and evaluation output omitted)
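With two output heads, compile() also accepts one loss per output plus an optional loss_weights argument that controls how much each head contributes to the total loss; a minimal sketch (the 0.9/0.1 weights are arbitrary, illustrative values):

model.compile(loss=["mean_squared_error","mean_squared_error"],
              loss_weights=[0.9,0.1],# hypothetical weights: emphasize the main (wide & deep) output
              optimizer="adam")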
