How to use layers.Dense:
1. Inside a Sequential model
layer = tf.keras.layers.Dense(100)
layer = tf.keras.layers.Dense(100, input_shape=[None, 5])
# only the first layer needs an input shape; None stands for the number of samples
2. Calling the layer as a function
layer(tf.zeros([10, 5]))   # input (10, 5), output (10, 100)
layer.variables            # x * w + b : w -- kernel, b -- bias
layer.trainable_variables  # get the trainable variables: kernel and bias
help(layer)                # inspect the layer's methods
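Below is a minimal runnable sketch of the two usages above; the 100 units and the (10, 5) dummy input are just the example values from these notes, not anything required by the API.

import tensorflow as tf

# 1. Dense inside a Sequential model (only the first layer needs input_shape)
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(100, activation='relu', input_shape=(5,)),
    tf.keras.layers.Dense(1),
])

# 2. Calling a Dense layer as a function and inspecting its variables
layer = tf.keras.layers.Dense(100)
out = layer(tf.zeros([10, 5]))       # input (10, 5) -> output (10, 100)
print(out.shape)
for v in layer.trainable_variables:  # kernel and bias
    print(v.name, v.shape)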
A custom layer can be implemented by subclassing the Layer class, as done below.
import matplotlib as mpl
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
import tensorflow as tf
from tensorflow import keras
print(tf.__version__)
2.0.0
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
from sklearn.model_selection import train_test_split
x_train_all, x_test, y_train_all, y_test = train_test_split(
housing.data, housing.target, random_state = 7)
x_train, x_valid, y_train, y_valid = train_test_split(
    x_train_all, y_train_all, random_state = 11)
print(x_train.shape, y_train.shape)
print(x_valid.shape, y_valid.shape)
print(x_test.shape, y_test.shape)
(11610, 8) (11610,)
(3870, 8) (3870,)
(5160, 8) (5160,)
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)
x_valid_scaled = scaler.transform(x_valid)
x_test_scaled = scaler.transform(x_test)
# customized dense layer
# implemented by subclassing keras.layers.Layer
class CustomizedDenseLayer(keras.layers.Layer):
    def __init__(self, units, activation=None, **kwargs):
        '''Initialization'''
        self.units = units  # number of output units
        self.activation = keras.layers.Activation(activation)  # activation function
        super(CustomizedDenseLayer, self).__init__(**kwargs)  # call the parent constructor

    def build(self, input_shape):
        '''Create and initialize the layer's parameters'''
        # x * w + b   input_shape: [None, a] -> w: [a, b] -> output_shape: [None, b]
        self.kernel = self.add_weight(name = 'kernel',
                                      shape = (input_shape[1], self.units),
                                      initializer = 'uniform',
                                      trainable = True)
        self.bias = self.add_weight(name = 'bias',
                                    shape = (self.units,),
                                    initializer = 'zeros',
                                    trainable = True)
        super(CustomizedDenseLayer, self).build(input_shape)  # call the parent build

    def call(self, x):
        '''Forward computation'''
        return self.activation(x @ self.kernel + self.bias)
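As a quick sanity check (not part of the original notes), the custom layer can be called on a dummy batch to confirm its output shape and trainable variables; the batch size 4 and feature count 8 below are illustrative.

check_layer = CustomizedDenseLayer(30, activation='relu')
out = check_layer(tf.zeros([4, 8]))        # build() creates kernel (8, 30) and bias (30,)
print(out.shape)                           # (4, 30)
for v in check_layer.trainable_variables:  # kernel and bias
    print(v.name, v.shape)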
# For a simple function with no trainable parameters, there is no need to
# define a class; the layer can be defined with Lambda instead.
# tf.nn.softplus: log(1 + e^x), an activation function
customized_softplus = keras.layers.Lambda(lambda x: tf.nn.softplus(x))
print(customized_softplus([-10., -5., 0., 5., 10.]))
model = keras.models.Sequential([
    CustomizedDenseLayer(30, activation='relu',
                         input_shape=x_train.shape[1:]),
    CustomizedDenseLayer(1),
    customized_softplus,  # add an activation layer
    # equivalent to keras.layers.Dense(1, activation='softplus')
    # or keras.layers.Dense(1), keras.layers.Activation('softplus')
])
model.summary()
model.compile(loss='mean_squared_error', optimizer="sgd",
              metrics = ["acc"])
callbacks = [keras.callbacks.EarlyStopping(
    patience=5, min_delta=1e-2)]
tf.Tensor([4.5417706e-05 6.7153489e-03 6.9314718e-01 5.0067153e+00 1.0000046e+01], shape=(5,), dtype=float32)
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
customized_dense_layer (Cust (None, 30) 270
_________________________________________________________________
customized_dense_layer_1 (Cu (None, 1) 31
_________________________________________________________________
lambda_1 (Lambda) (None, 1) 0
=================================================================
Total params: 301
Trainable params: 301
Non-trainable params: 0
_________________________________________________________________
history = model.fit(x_train_scaled, y_train,
                    validation_data = (x_valid_scaled, y_valid),
                    epochs = 100,
                    callbacks = callbacks)
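A minimal sketch for visualizing the training curves with the pandas/matplotlib imports above, assuming the standard history.history keys ('loss' and 'val_loss'):

def plot_learning_curves(history):
    # history.history maps metric names to per-epoch values
    pd.DataFrame(history.history).plot(figsize=(8, 5))
    plt.grid(True)
    plt.gca().set_ylim(0, 2)
    plt.show()

plot_learning_curves(history)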