Reputation: 13846
How do I get sensitivity and specificity when class_mode='binary'? My current solution works for class_mode='categorical':
from keras.callbacks import Callback
import numpy as np
from sklearn.metrics import confusion_matrix


class SensitivitySpecificityCallback(Callback):
    def on_epoch_end(self, epoch, logs=None):
        if epoch:  # skip epoch 0
            x_test, y_test = self.validation_data[0], self.validation_data[1]
            predictions = self.model.predict(x_test)
            output_sensitivity_specificity(epoch, predictions, y_test)


def output_sensitivity_specificity(epoch, predictions, y_test):
    y_test = np.argmax(y_test, axis=-1)
    predictions = np.argmax(predictions, axis=-1)
    c = confusion_matrix(y_test, predictions)
    print('Confusion matrix:\n', c)
    print('[{:03d}] sensitivity'.format(epoch), c[0, 0] / (c[0, 1] + c[0, 0]))
    print('[{:03d}] specificity'.format(epoch), c[1, 1] / (c[1, 1] + c[1, 0]))
Full code example: 82 source lines (Python 2 & 3 compatible).
All the output is wrong:
Confusion matrix:
[[40]]
Traceback (most recent call last):
File "network.py", line 118, in <module>
callbacks=[SensitivitySpecificityCallback()], verbose=1)
File "lib/python2.7/site-packages/keras/legacy/interfaces.py", line 91, in wrapper
return func(*args, **kwargs)
File "lib/python2.7/site-packages/keras/engine/training.py", line 1426, in fit_generator
initial_epoch=initial_epoch)
File "lib/python2.7/site-packages/keras/engine/training_generator.py", line 229, in fit_generator
callbacks.on_epoch_end(epoch, epoch_logs)
File "lib/python2.7/site-packages/keras/callbacks.py", line 77, in on_epoch_end
callback.on_epoch_end(epoch, logs)
File "network.py", line 56, in on_epoch_end
output_sensitivity_specificity(epoch, predictions, y_test)
File "network.py", line 64, in output_sensitivity_specificity
print('[{:03d}] sensitivity'.format(epoch), c[0, 0] / (c[0, 1] + c[0, 0]))
IndexError: index 1 is out of bounds for axis 1 with size 1
Upvotes: 1
Views: 1353
Reputation: 33440
Since in binary mode you are essentially predicting one value, which indicates the probability of the positive class (i.e. binary classification), using .argmax() on the predictions always returns 0.
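A quick sketch of why, with made-up probabilities (it is the (n, 1) shape of the predictions that matters here):

import numpy as np

# Hypothetical binary-mode predictions: one probability per sample, shape (3, 1).
preds = np.array([[0.1], [0.9], [0.7]])
print(np.argmax(preds, axis=-1))  # [0 0 0] -- the only column index is 0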
Therefore, you need to modify your output_sensitivity_specificity function for this case:
def output_sensitivity_specificity(epoch, predictions, y_test, mode='binary'):
    if mode == 'binary':
        # determine positive class predictions by thresholding at 0.5
        idx = predictions >= 0.5
        predictions = np.zeros(predictions.shape)
        predictions[idx] = 1
        # no need to modify y_test since it consists of zeros and ones already
    else:
        y_test = np.argmax(y_test, axis=-1)
        predictions = np.argmax(predictions, axis=-1)
    c = confusion_matrix(y_test, predictions)
    print('Confusion matrix:\n', c)
    print('[{:03d}] sensitivity'.format(epoch), c[0, 0] / (c[0, 1] + c[0, 0]))
    print('[{:03d}] specificity'.format(epoch), c[1, 1] / (c[1, 1] + c[1, 0]))
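(Equivalently, the thresholding can be written in one line using NumPy's boolean-to-integer cast: predictions = (predictions >= 0.5).astype(int).)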
Just pass mode=class_mode when calling output_sensitivity_specificity in your callback, and it will work for both binary and categorical modes.
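For example, here is one way to wire that up, reusing the callback from the question; the class_mode constructor argument is my own addition for illustration, not part of the original code:

class SensitivitySpecificityCallback(Callback):
    def __init__(self, class_mode='binary'):  # class_mode added here for illustration
        super(SensitivitySpecificityCallback, self).__init__()
        self.class_mode = class_mode

    def on_epoch_end(self, epoch, logs=None):
        if epoch:
            x_test, y_test = self.validation_data[0], self.validation_data[1]
            predictions = self.model.predict(x_test)
            output_sensitivity_specificity(epoch, predictions, y_test,
                                           mode=self.class_mode)

and then register it with, e.g., callbacks=[SensitivitySpecificityCallback(class_mode='binary')].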
Upvotes: 1