Reputation: 59
I'm trying to build a multi-layer RNN without using MultiRNNCell, because I want to update each layer independently, so I didn't use tf.nn.dynamic_rnn either.
with tf.variable_scope("cell"):
with tf.variable_scope("cell_1", reuse=True):
cell_1 = tf.contrib.rnn.BasicLSTMCell(n_hidden)
states_1 = cell_1.zero_state(batch_size, tf.float32)
with tf.variable_scope("cell_2", reuse=True):
cell_2 = tf.contrib.rnn.BasicLSTMCell(n_hidden)
states_2 = cell_2.zero_state(batch_size, tf.float32)
with tf.variable_scope("cell_3", reuse=True):
cell_3 = tf.contrib.rnn.BasicLSTMCell(n_hidden)
states_3 = cell_3.zero_state(batch_size, tf.float32)
outputs_1=[]
outputs_2=[]
outputs_3=[]
with tf.variable_scope("architecture"):
for i in range(n_step):
output_1, states_1 = cell_1(X[:, i], states_1)
output_2, states_2 = cell_2(output_1, states_2)
output_3, states_3 = cell_3(output_2, states_3)
outputs_3.append(output_3)
Then I got an error like this:
ValueError: Variable architecture/basic_lstm_cell/kernel already exists, disallowed. Did you mean to set reuse=True in VarScope?
So it seems impossible to declare multiple cells in TensorFlow without MultiRNNCell. How can I solve this?
Upvotes: 1
Views: 229
Reputation: 59
I solved the problem myself and am sharing the answer.
cell = tf.contrib.rnn.BasicLSTMCell(n_hidden)
cell2 = tf.contrib.rnn.BasicLSTMCell(n_hidden)
cell3 = tf.contrib.rnn.BasicLSTMCell(n_hidden)

states = cell.zero_state(batch_size, tf.float32)
states2 = cell2.zero_state(batch_size, tf.float32)
states3 = cell3.zero_state(batch_size, tf.float32)

outputs = []
for i in range(n_step):
    with tf.variable_scope("cell1"):
        output, states = cell(X[:, i], states)
    with tf.variable_scope("cell2"):
        output2, states2 = cell2(output, states2)
    with tf.variable_scope("cell3"):
        output3, states3 = cell3(output2, states3)
    outputs.append(output3)
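Why this works: each cell builds its kernel and bias under its own scope name ("cell1", "cell2", "cell3") the first time it is called, so the three cells no longer collide under a shared scope, and on later time steps the cell reuses the variables it already created. For completeness, here is a minimal self-contained sketch of the same approach (assuming a TF 1.x version where RNN cells manage their own variable reuse across calls); the sizes, the X placeholder, and the layer1_vars line at the end are illustrative assumptions, not part of my original code:

import tensorflow as tf

# Illustrative sizes only -- replace with your own values.
n_hidden, n_step, n_input, batch_size = 128, 10, 64, 32

X = tf.placeholder(tf.float32, [batch_size, n_step, n_input])

cell = tf.contrib.rnn.BasicLSTMCell(n_hidden)
cell2 = tf.contrib.rnn.BasicLSTMCell(n_hidden)
cell3 = tf.contrib.rnn.BasicLSTMCell(n_hidden)

states = cell.zero_state(batch_size, tf.float32)
states2 = cell2.zero_state(batch_size, tf.float32)
states3 = cell3.zero_state(batch_size, tf.float32)

outputs = []
for i in range(n_step):
    # Each layer's weights are created under a distinct scope on the first
    # step; later steps reuse them, so no "already exists" error is raised.
    with tf.variable_scope("cell1"):
        output, states = cell(X[:, i], states)
    with tf.variable_scope("cell2"):
        output2, states2 = cell2(output, states2)
    with tf.variable_scope("cell3"):
        output3, states3 = cell3(output2, states3)
    outputs.append(output3)

# Because each layer's variables now live under their own prefix, they can be
# fetched per layer, e.g. to update each layer with its own optimizer via var_list.
layer1_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="cell1")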
Upvotes: 1