import tensorflow as tf
from keras import backend as K

def binary_crossentropy(y_true, y_pred):
    # The mask is 1.0 where a label is present (0 or 1) and 0.0 where it is
    # missing (-1), so unlabelled entries contribute nothing to the loss.
    mask = tf.cast(tf.not_equal(y_true, -1), tf.float32)
    # K.binary_crossentropy takes (target, output) in that order.
    return K.mean(K.binary_crossentropy(y_true * mask, y_pred * mask), axis=-1)

def binary_accuracy(y_true, y_pred):
    # Accuracy computed over the labelled entries only; labels of -1 are ignored.
    t0 = tf.equal(y_true, 0)
    t1 = tf.equal(y_true, 1)
    p0 = tf.equal(tf.round(y_pred), 0)
    p1 = tf.equal(tf.round(y_pred), 1)
    total = tf.reduce_sum(tf.cast(t0, tf.float32)) + tf.reduce_sum(tf.cast(t1, tf.float32))
    correct = (tf.reduce_sum(tf.cast(tf.logical_and(t0, p0), tf.float32))
               + tf.reduce_sum(tf.cast(tf.logical_and(t1, p1), tf.float32)))
    return correct / (total + K.epsilon())

def precision(y_true, y_pred):
    # Precision = true positives / predicted positives, over labelled entries only.
    mask = tf.cast(tf.not_equal(y_true, -1), tf.float32)
    true_positives = K.sum(K.round(K.clip(y_true * mask * y_pred * mask, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred * mask, 0, 1)))
    return true_positives / (predicted_positives + K.epsilon())

def recall(y_true, y_pred):
    # Recall = true positives / actual positives, over labelled entries only.
    mask = tf.cast(tf.not_equal(y_true, -1), tf.float32)
    true_positives = K.sum(K.round(K.clip(y_true * mask * y_pred * mask, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true * mask, 0, 1)))
    return true_positives / (possible_positives + K.epsilon())