hlcp

Reputation: 37

How to implement this loss in Keras

I want to implement the loss shown in the linked image: Loss

This is my code:

import tensorflow as tf

def loss(output, target, from_logits=False):
    # j, k (class-value vectors) and y (the target value) are assumed
    # to be defined elsewhere
    L1 = -tf.reduce_sum(target * tf.log(output),
                        reduction_indices=len(output.get_shape()) - 1)
    L2 = tf.reduce_sum(tf.square(tf.subtract(
        tf.reduce_sum(tf.multiply(j, output),
                      reduction_indices=len(output.get_shape()) - 1), y)))
    L3 = tf.reduce_sum(tf.reduce_sum(tf.multiply(
        output, tf.square(tf.subtract(j, tf.reduce_prod(tf.multiply(k, output)))))))
    loss = L1 + L2 + L3
    return loss

Please help me. Thank you very much.

Upvotes: 2

Views: 232

Answers (1)

Daniel Möller

Reputation: 86600

  • Supposing you have y_true with shape (samples, 11), one-hot encoded (a minimal encoding sketch follows this list).
  • Supposing that you are using a softmax activation in the last layer (so the class probabilities sum to 1).
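As a minimal sketch of the assumed target encoding (the label range 10..20 is read off the j vector used below; the shift by 10 maps the labels onto one-hot indices 0..10):

    import numpy as np
    from keras.utils import to_categorical

    # hypothetical labels, one per sample, in the range 10..20
    labels = np.array([10, 15, 20])

    # shift to indices 0..10 and one-hot encode -> shape (samples, 11)
    y_true = to_categorical(labels - 10, num_classes=11)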

A custom loss in Keras is a function with the signature def func(y_true, y_pred):
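For instance, a plain mean squared error written in this form would be:

    import keras.backend as K

    def mse_loss(y_true, y_pred):
        # return one loss value per sample; Keras averages over the batch
        return K.mean(K.square(y_pred - y_true), axis=-1)

The loss in the question follows the same pattern: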

import keras.backend as K
from keras.losses import categorical_crossentropy

def loss(y_true, y_pred):

    #p: the predicted probability distribution
    p = y_pred                                        #(samples,11)

    #j (also used as k): the class values 10..20
    j = K.cast(K.arange(10, 21), K.floatx())          #(11,)
    j = K.reshape(j, (1,-1))                          #(1,11)

    #y_true as class number
    y = K.sum(y_true * j, axis=-1, keepdims=True)    #(samples, 1)

    #first term:
    L1 = categorical_crossentropy(y_true, y_pred)     #(samples,)

    #second term, with y outside the sum over j
    kpk = j * p                                   #(samples, 11)
    kpkSum = K.sum(kpk, axis=-1, keepdims=True)   #(samples, 1)
    L2 = kpkSum - y                               #(samples, 1)
    L2 = K.square(L2) / 2.                        #(samples, 1)
    #drop the last axis so L1 + L2 + L3 stays (samples,)
    L2 = K.squeeze(L2, axis=-1)                   #(samples,)

    #third term:
    L3 = K.square(j - kpkSum)                      #(samples,11)
    L3 = p * L3                                    #(samples,11)
    L3 = K.sum(L3, axis=-1)                        #(samples,)

    return L1 + L2 + L3 #the mean over the N samples is taken automatically by Keras
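To use it, pass the function itself to compile. A hypothetical end-to-end check (the layer sizes and data here are made up):

    import numpy as np
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.utils import to_categorical

    # toy model whose last layer is an 11-way softmax, as assumed above
    model = Sequential([
        Dense(32, activation='relu', input_shape=(20,)),
        Dense(11, activation='softmax'),
    ])
    model.compile(optimizer='adam', loss=loss)

    x = np.random.random((64, 20))
    y = to_categorical(np.random.randint(0, 11, size=(64,)), num_classes=11)
    model.fit(x, y, epochs=1, batch_size=16)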

Upvotes: 1
