from numpy import array

from keras import backend as K
from keras import initializers
from keras.layers import Input,Layer,LSTMCell
from keras.layers import LSTM,Bidirectional,RNN
from keras.models import Model
# Shared initializer so both recurrent layers start from identical weights,
# which makes their weight dumps and predictions directly comparable.
kernel_initializer = initializers.random_uniform(seed=2)

# One batch: a single sequence of three timesteps, one feature each.
data = array([0.1, 0.1, 0.1]).reshape((1, 3, 1))

inputs1 = Input(shape=(3, 1))
inputs2 = Input(shape=(3, 1))

# Model 1: an LSTMCell wrapped in the generic RNN layer.
cell = LSTMCell(
    2,
    kernel_initializer=kernel_initializer,
    recurrent_initializer=kernel_initializer,
)
layer = RNN(cell, name="RNN", trainable=True)
y = layer(inputs1)

# Model 2: the fused LSTM layer configured with the same initializers.
aa = LSTM(
    2,
    name="LSTM",
    kernel_initializer=kernel_initializer,
    recurrent_initializer=kernel_initializer,
    trainable=True,
)(inputs2)

model1 = Model(inputs=inputs1, outputs=y)
model2 = Model(inputs=inputs2, outputs=aa)
model1.summary()
model2.summary()

# Fetch the weights (and biases) of each recurrent layer by name.
weight_Dense_1 = model1.get_layer('RNN').get_weights()
weight_Dense_2 = model2.get_layer('LSTM').get_weights()
print("RNN*" * 50)
print(weight_Dense_1)
print("LSTM*" * 50)
print(weight_Dense_2)
print("model4--", model1.predict(data))
print("model44--", model2.predict(data))
class MinimalRNNCell(Layer):
    """Minimal recurrent cell: output = inputs . kernel + prev_output . recurrent_kernel.

    A bare-bones cell (no bias, no activation) meant to be wrapped in
    ``keras.layers.RNN``. The output doubles as the single recurrent state.
    """

    def __init__(self, units, **kwargs):
        # units: dimensionality of the output (and state) space.
        self.units = units
        # The RNN wrapper reads state_size to allocate the recurrent state.
        self.state_size = units
        super(MinimalRNNCell, self).__init__(**kwargs)

    def build(self, input_shape):
        """Create the input and recurrent weight matrices (no bias term)."""
        self.kernel = self.add_weight(shape=(input_shape[-1], self.units),
                                      initializer='uniform',
                                      name='kernel')
        self.recurrent_kernel = self.add_weight(
            shape=(self.units, self.units),
            initializer='uniform',
            name='recurrent_kernel')
        self.built = True

    def call(self, inputs, states):
        """Run one timestep; returns (output, [new_state]) with output == state."""
        prev_output = states[0]
        h = K.dot(inputs, self.kernel)
        output = h + K.dot(prev_output, self.recurrent_kernel)
        return output, [output]

    def get_config(self):
        """Include ``units`` so the cell can be serialized and re-created."""
        config = super(MinimalRNNCell, self).get_config()
        config['units'] = self.units
        return config
# Repeat the experiment with the custom minimal cell wrapped in RNN.
data = array([0.1, 0.1, 0.1]).reshape((1, 3, 1))
inputs1 = Input(shape=(3, 1))

cell = MinimalRNNCell(2)
layer = RNN(cell, name="RNN", trainable=True)
y = layer(inputs1)

model1 = Model(inputs=inputs1, outputs=y)
model1.summary()

# Fetch the weights (and biases) of the wrapped custom cell by layer name.
weight_Dense_1 = model1.get_layer('RNN').get_weights()
print("RNN*" * 50)
print(weight_Dense_1)
print("model1--", model1.predict(data))