当 reader 里的 datashape 从 [160, 32] 改为 [640, 32] 时,原代码:
class CRNN:
    """CRNN model: a DenseNet convolutional front end feeding LSTM layers.

    Args:
        data: input image tensor fed to the DenseNet.
        label: ground-truth labels (stored; used elsewhere in the class).
        rnnSeqLengths: per-sample sequence lengths for the RNN.
        rnnBatch: RNN batch size.
        conf: configuration object.
        isTraining: training-mode flag (controls batch norm / dropout).
        keepProb: dropout keep probability.
        reuse: variable-scope reuse flag for TensorFlow.
    """

    def __init__(self, data, label, rnnSeqLengths, rnnBatch, conf, isTraining, keepProb, reuse):
        # Keep all inputs / configuration on the instance.
        self.data = data
        self.label = label
        self.rnnSeqLengths = rnnSeqLengths
        self.rnnBatch = rnnBatch
        self.conf = conf
        self.isTraining = isTraining
        self.keepProb = keepProb

        # Convolutional feature extraction; DenseNet takes a dropout *rate*,
        # hence 1 - keep probability.
        features = DenseNet(self.data, 2, 12, self.isTraining,
                            dropout=1-self.keepProb, reuse=reuse).model
        # Swap axes 1 and 2 so the horizontal (time) dimension leads.
        features = tf.transpose(features, perm=[0, 2, 1, 3])
        # Flatten to (batch, time_steps, feature_dim) for the RNN.
        # NOTE(review): assumes the CNN output flattens to 20 steps of 816
        # features for the reader's [160, 32] input shape — verify if the
        # reader configuration changes.
        self.word_vec = tf.reshape(features, [-1, 20, 816])
        self.lstmLayers(reuse=reuse)
改为:
class CRNN:
    """CRNN model: a DenseNet convolutional front end feeding LSTM layers.

    Args:
        data: input image tensor fed to the DenseNet.
        label: ground-truth labels (stored; used elsewhere in the class).
        rnnSeqLengths: per-sample sequence lengths for the RNN.
        rnnBatch: RNN batch size.
        conf: configuration object.
        isTraining: training-mode flag (controls batch norm / dropout).
        keepProb: dropout keep probability.
        reuse: variable-scope reuse flag for TensorFlow.
    """

    def __init__(self, data, label, rnnSeqLengths, rnnBatch, conf, isTraining, keepProb, reuse):
        # Keep all inputs / configuration on the instance.
        self.data = data
        self.label = label
        self.rnnSeqLengths = rnnSeqLengths
        self.rnnBatch = rnnBatch
        self.conf = conf
        self.isTraining = isTraining
        self.keepProb = keepProb

        # Convolutional feature extraction; DenseNet takes a dropout *rate*,
        # hence 1 - keep probability.
        features = DenseNet(self.data, 2, 12, self.isTraining,
                            dropout=1-self.keepProb, reuse=reuse).model
        # Swap axes 1 and 2 so the horizontal (time) dimension leads.
        features = tf.transpose(features, perm=[0, 2, 1, 3])
        # Flatten to (batch, time_steps, feature_dim) for the RNN.
        # NOTE(review): 80 time steps corresponds to the reader's widened
        # [640, 32] input (4x the original 160 width, hence 4x 20 steps) —
        # verify against the reader configuration.
        self.word_vec = tf.reshape(features, [-1, 80, 816])
        self.lstmLayers(reuse=reuse)