2017-06-17 64 views
-1

使用MultiRNNCell - 形状不匹配初始化birdirectional_dynamic_rnn时出现错误。使用MultiRNNCell - 形状不匹配初始化birdirectional_dynamic_rnn时出错

我的代码是:

def __init__(self, args): 
    self.args = args 
    self.input_data = tf.placeholder(tf.float32, [None, args.sentence_length, args.word_dim]) 
    self.output_data = tf.placeholder(tf.float32, [None, args.sentence_length, args.class_size]) 
    with tf.variable_scope('forward'): 
     fw_cell = tf.contrib.rnn.LSTMCell(args.rnn_size, state_is_tuple=True) 
     fw_cell = tf.contrib.rnn.DropoutWrapper(fw_cell, output_keep_prob=0.5) 
     #print("ff", fw_cell.get_shape()) 
     fw_cell = tf.contrib.rnn.MultiRNNCell([fw_cell] * args.num_layers, state_is_tuple=True) 
     #print("fw", fw_cell.get_shape()) 
    with tf.variable_scope('backward'):  
     bw_cell = tf.contrib.rnn.LSTMCell(args.rnn_size, state_is_tuple=True) 
     bw_cell = tf.contrib.rnn.DropoutWrapper(bw_cell, output_keep_prob=0.5) 
     bw_cell = tf.contrib.rnn.MultiRNNCell([bw_cell] * args.num_layers, state_is_tuple=True) 
    words_used_in_sent = tf.sign(tf.reduce_max(tf.abs(self.input_data), reduction_indices=2)) 
    self.length = tf.cast(tf.reduce_sum(words_used_in_sent, reduction_indices=1), tf.int32) 
    output, _= tf.nn.bidirectional_dynamic_rnn(fw_cell_1, bw_cell_1, 
              self.input_data, dtype=tf.float32) 

其中word_dim是311,class_size是5,rnn_size为256,num_layers是2,sentence_length是25

这是错误:

error is ValueError: Trying to share variable bidirectional_rnn/fw/multi_rnn_cell/cell_0/lstm_cell/kernel, but specified shape (512, 1024) and found shape (567, 1024). 

回答

0

fw_cell和bw_cell的rnn_size必须与input_data的word_dim 相同,但是您的word_dim是311和rnn_size是256