# NOTE: the original file began with bare prose (a syntax error in Python); converted to comments.
# TensorFlow makes it convenient to implement deep (stacked) recurrent neural networks,
# but training was slow and results were poor — possibly I have not yet worked out the
# right way to use deep RNNs. LSTM and GRU are already sufficient; stacked-RNN
# approaches still leave room for improvement.
import tensorflow as tf
from tensorflow import keras
import numpy as np
# Model / training hyperparameters.
max_features=89000  # vocabulary size for the Embedding layer (IMDB has ~88.6k word ids — presumably chosen to cover them all; confirm)
maxlen = 450  # every review is padded/truncated to this many tokens
batch_size=64  # mini-batch size used by model.fit
cell_size=8  # units per LSTM cell; also reused as the embedding dimension
n_layers = 3  # number of stacked LSTM cells in the deep RNN
def my_load_data(path='imdb.npz'):
    """Download (on first use) and load the IMDB sentiment dataset.

    Args:
        path: Filename for the cached archive; passed to
            ``tf.keras.utils.get_file`` as the local file name under
            ``DataSet/``.

    Returns:
        ``(x_train, y_train), (x_test, y_test)`` — numpy arrays where each
        x entry is a variable-length list of word indices and each y entry
        is a 0/1 sentiment label.
    """
    origin_folder = 'https://storage.googleapis.com/tensorflow/tf-keras-datasets/'
    # get_file returns the local path of the cached copy, downloading only
    # when the file is not already present in the cache directory.
    path = tf.keras.utils.get_file(
        path,
        origin=origin_folder + 'imdb.npz',
        cache_dir='DataSet/',
        cache_subdir="",
    )
    # allow_pickle=True is required: the archive stores object arrays
    # (Python lists of word indices), not plain numeric ndarrays.
    with np.load(path, allow_pickle=True) as f:
        x_train, y_train = f['x_train'], f['y_train']
        x_test, y_test = f['x_test'], f['y_test']
    return (x_train, y_train), (x_test, y_test)
# Fetch the raw IMDB splits: each review is a variable-length list of word ids.
(x_train, y_train), (x_test, y_test) = my_load_data(path='imdb.npz')
# Pad/truncate every review to exactly `maxlen` tokens so batches are rectangular.
x_train = keras.preprocessing.sequence.pad_sequences(x_train,maxlen=maxlen)
x_test = keras.preprocessing.sequence.pad_sequences(x_test,maxlen=maxlen)
model = keras.Sequential()
# Embed word indices into dense vectors: (batch, maxlen) -> (batch, maxlen, cell_size).
model.add(keras.layers.Embedding(input_dim=max_features,
                                 output_dim=cell_size,
                                 input_length=maxlen))
# Stack n_layers LSTM cells into one deep recurrent layer.
cells = [keras.layers.LSTMCell(cell_size) for _ in range(n_layers)]
# BUG FIX: the original passed return_sequences=True, so the sigmoid Dense head
# produced one prediction per timestep — shape (batch, maxlen, 1) — while y_train
# holds a single scalar label per review.  binary_crossentropy then broadcast the
# label across all timesteps, which explains the slow training and poor accuracy
# noted in the header comment.  Returning only the final state yields
# (batch, cell_size) and a single per-review prediction.
model.add(keras.layers.RNN(keras.layers.StackedRNNCells(cells),
                           return_sequences=False))
# Per-review probability that the review is positive.
model.add(keras.layers.Dense(1, activation='sigmoid'))
# Adam + binary cross-entropy for the single-sigmoid sentiment head.
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['acc'])

# Train for a few epochs, then report held-out performance on the test split.
model.fit(x_train, y_train, batch_size=batch_size, epochs=3)
loss, accuracy = model.evaluate(x_test, y_test)
print('test loss', loss)
print('test accuracy', accuracy)