Reputation: 363
I am trying to record summary statistics for precision and recall with tensorflow to use with tensor-board with the below code.
I have added both global and local variables initializers, however this still throws an error telling me that i have an uninitialized value for 'recall'.
Does anyone have any ideas on why this is still throwing an error?
Error message is below the code block
def classifier_graph(x, y, learning_rate=0.1):
with tf.name_scope('classifier'):
with tf.name_scope('model'):
W = tf.Variable(tf.zeros([xdim, ydim]), name='W')
b = tf.Variable(tf.zeros([ydim]), name='b')
y_ = tf.matmul(x, W) + b
with tf.name_scope('cross_entropy'):
diff = tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=y_)
cross_entropy = tf.reduce_mean(diff)
summary = tf.summary.scalar('cross_entropy', cross_entropy)
with tf.name_scope('train'):
#cross_entropy = tf.reduce_mean(-tf.reduce_sum(y * tf.log(y_), reduction_indices=[1]), name='cross_entropy')
train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(cross_entropy)
# minimise cross_entropy via GD
#with tf.name_scope('init'):
#init = tf.global_variables_initializer()
#local_init = tf.local_variables_initializer()
#init = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer())
with tf.name_scope('init'):
init = tf.global_variables_initializer()
init_l = tf.local_variables_initializer()
with tf.name_scope('metrics'):
recall = tf.metrics.recall(y, y_ )
precision = tf.metrics.precision(y, y_)
v_rec = tf.summary.scalar('recall', recall)
v_prec = tf.summary.scalar('precision', precision)
metrics = tf.summary.merge_all()
return [W, b, y_, cross_entropy, train_step, init, init_l, metrics]
def train_classifier(insamples, outsamples, batch_size, iterations, feature_set_index=1, model=None, device):
x = tf.placeholder(tf.float32, [None, xdim], name='x') # None indications arbitrary first dimension
y = tf.placeholder(tf.float32, [None, ydim], name='y')
W, b, y_, cross_entropy, train_step, init, init_l, metrics = classifier_graph(x, y)
with tf.Session(config=config) as sess, tf.device(device):
sess.run(init)
sess.run(init_l)
file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())
t = 0
while t < iterations:
t += 1
_, err, metrics_str = sess.run([train_step, cross_entropy, metrics], feed_dict={x: batch_x, y: batch_y })
all_err.append(err)
file_writer.add_summary(metrics_str,t)
return 'Done'
The exact error message is below:
FailedPreconditionError (see above for traceback): Attempting to use uninitialized value recall/true_positives/count
[[Node: recall/true_positives/count/read = Identity[T=DT_FLOAT, _class=["loc:@recall/true_positives/count"], _device="/job:localhost/replica:0/task:0/gpu:0"](recall/true_positives/count)]]
Thanks!
EDIT:
Upon making the changes suggested by @Ishant Mrinal below, I encounter an error which i was previously hitting:
InvalidArgumentError (see above for traceback): tags and values not the same shape: [] != [2] (tag 'precision_1')
This suggests that the precision tensor is a different shape to the others, it does not throw this error for cross-entropy or recall.
Upvotes: 0
Views: 1020
Reputation: 5206
The second problem is caused by the fact that tf.metrics
returns two tensors. Instead, do
_, recall = tf.metrics.recall(y, y_ )
_, precision = tf.metrics.precision(y, y_)
v_rec = tf.summary.scalar('recall', recall)
v_prec = tf.summary.scalar('precision', precision)
Upvotes: 2
Reputation: 4918
Its because of two initialization lines placement, move these two lines to the train_classifier
function. The error suggests some of the variables aren't initialized.
def train_classifier(...):
...
init = tf.global_variables_initializer()
init_l = tf.local_variables_initializer()
with tf.Session(config=config) as sess, tf.device(device):
sess.run(init)
sess.run(init_l)
Upvotes: 0