Reputation: 1859
I am building a graph via a function and am trying to extract the value of a variable so that I can add further operations. Part of the function I have written is shown below:
def build(self, save_path=None, save_name=None):
    g = tf.Graph()
    with g.as_default():
        init_op = tf.initialize_all_variables()
        images = tf.placeholder(tf.float32, shape=[None, 300, 300, 3], name='input')
        with tf.variable_scope('conv1_'):
            conv11 = self.conv_relu(images, kernel_shape=[3, 3, 3, 64], bias_shape=64, name='c1')
            conv12 = self.conv_relu(conv11, kernel_shape=[3, 3, 64, 64], bias_shape=64, name='c2')
            pool1 = tf.nn.max_pool(conv12, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool1')
        with tf.variable_scope('conv2_'):
            conv21 = self.conv_relu(pool1, kernel_shape=[3, 3, 64, 128], bias_shape=128, name='c1')
            conv22 = self.conv_relu(conv21, kernel_shape=[3, 3, 128, 128], bias_shape=128, name='c2')
            pool2 = tf.nn.max_pool(conv22, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool2')
        with tf.variable_scope('conv3_'):
            conv31 = self.conv_relu(pool2, kernel_shape=[3, 3, 128, 256], bias_shape=256, name='c1')
            conv32 = self.conv_relu(conv31, kernel_shape=[3, 3, 256, 256], bias_shape=256, name='c2')
            conv33 = self.conv_relu(conv32, kernel_shape=[3, 3, 256, 256], bias_shape=256, name='c3')
            pool3 = tf.nn.max_pool(conv33, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool3')
        with tf.variable_scope('conv4_'):
            conv41 = self.conv_relu(pool3, kernel_shape=[3, 3, 256, 512], bias_shape=512, name='c1')
            conv42 = self.conv_relu(conv41, kernel_shape=[3, 3, 512, 512], bias_shape=512, name='c2')
            conv43 = self.conv_relu(conv42, kernel_shape=[3, 3, 512, 512], bias_shape=512, name='c3')
            pool4 = tf.nn.max_pool(conv43, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool4')
        with tf.variable_scope('conv5_'):
            conv51 = self.conv_relu(pool4, kernel_shape=[3, 3, 512, 512], bias_shape=512, name='c1')
            conv52 = self.conv_relu(conv51, kernel_shape=[3, 3, 512, 512], bias_shape=512, name='c2')
            conv53 = self.conv_relu(conv52, kernel_shape=[3, 3, 512, 512], bias_shape=512, name='c3')
            pool5 = tf.nn.max_pool(conv53, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name='pool5')
        pool5_shape = tf.shape(pool5)
        pool5_reshaped = tf.reshape(pool5, shape=[pool5_shape[0], -1], name='pool5_reshaped')
        weight_rows = pool5_shape[1] * pool5_shape[2] * pool5_shape[3]

    sess = tf.Session(graph=g)
    inp = np.zeros(shape=(2, 300, 300, 3))
    print(inp.shape)
    sess.run(init_op)
    print(sess.run(weight_rows, feed_dict={images:inp}))
    sess.close()
At the line print(sess.run(weight_rows, feed_dict={images:inp})) I get the following error:
FailedPreconditionError (see above for traceback): Attempting to use uninitialized value conv5_/biasesc3
[[Node: conv5_/biasesc3/read = Identity[T=DT_FLOAT, _class=["loc:@conv5_/biasesc3"], _device="/job:localhost/replica:0/task:0/cpu:0"](conv5_/biasesc3)]]
What is the reason for this error when I have already run the init_op operation in the session? How exactly does this work, and what am I doing wrong here?
Upvotes: 1
Views: 41
Reputation: 1065
You need to define your init_op (i.e. call tf.initialize_all_variables()) after you have declared all of your variables.
Creating a variable via tf.get_variable or tf.Variable places it in the GLOBAL_VARIABLES collection (unless otherwise specified with the collections kwarg). tf.initialize_all_variables() looks at this collection and creates an op that initializes the variables listed in it at that moment; variables created afterwards are not covered by the op. To inspect the GLOBAL_VARIABLES collection, you can use tf.get_collection with tf.GraphKeys.GLOBAL_VARIABLES as the argument.
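For example, here is a minimal sketch (using the same TF 1.x API as the question) showing that the init op only covers variables that already exist at the moment it is created:

import tensorflow as tf

g = tf.Graph()
with g.as_default():
    init_op = tf.initialize_all_variables()  # GLOBAL_VARIABLES is still empty here
    v = tf.get_variable('v', shape=[2], initializer=tf.zeros_initializer())
    # The variable is in the collection now, but init_op was built before it existed.
    print(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES))  # [<tf.Variable 'v:0' ...>]

with tf.Session(graph=g) as sess:
    sess.run(init_op)  # runs fine, but initializes nothing
    sess.run(v)        # FailedPreconditionError: v is still uninitialized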
TL;DR: place init_op = tf.initialize_all_variables() after the graph has been built.
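Applied to the build() method from the question, the ordering would look roughly like the sketch below. The conv_relu helper here is only an assumption about what self.conv_relu does (conv + bias + ReLU), and the network is cut down to a single conv/pool block to keep the example short; the only point is that init_op is created after every variable:

import numpy as np
import tensorflow as tf

def conv_relu(x, kernel_shape, bias_shape, name):
    # Hypothetical stand-in for the question's self.conv_relu helper.
    weights = tf.get_variable('weights' + name, shape=kernel_shape)
    biases = tf.get_variable('biases' + name, shape=[bias_shape],
                             initializer=tf.zeros_initializer())
    return tf.nn.relu(tf.nn.conv2d(x, weights, strides=[1, 1, 1, 1], padding='SAME') + biases)

g = tf.Graph()
with g.as_default():
    images = tf.placeholder(tf.float32, shape=[None, 300, 300, 3], name='input')
    with tf.variable_scope('conv1_'):
        conv11 = conv_relu(images, kernel_shape=[3, 3, 3, 64], bias_shape=64, name='c1')
        pool1 = tf.nn.max_pool(conv11, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
                               padding='SAME', name='pool1')
    pool1_shape = tf.shape(pool1)
    weight_rows = pool1_shape[1] * pool1_shape[2] * pool1_shape[3]
    # Every variable has been created by now, so the init op covers all of them.
    init_op = tf.initialize_all_variables()

sess = tf.Session(graph=g)
inp = np.zeros(shape=(2, 300, 300, 3))
sess.run(init_op)
print(sess.run(weight_rows, feed_dict={images: inp}))  # 150 * 150 * 64
sess.close()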
Upvotes: 1