PerformanceMetrics.py
from keras import backend as K


class PerformanceMetrics(object):

    @classmethod
    def precision(cls, y_true, y_pred):
        # Precision = TP / (TP + FP); K.epsilon() guards against division by zero.
        true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
        predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
        precision = true_positives / (predicted_positives + K.epsilon())
        return precision

    @classmethod
    def recall(cls, y_true, y_pred):
        # Recall = TP / (TP + FN).
        true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
        possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
        recall = true_positives / (possible_positives + K.epsilon())
        return recall

    @classmethod
    def fbeta_score(cls, y_true, y_pred, beta=1):
        # F-beta = (1 + beta^2) * p * r / (beta^2 * p + r).
        if beta < 0:
            raise ValueError('The lowest choosable beta is zero (only precision).')
        # If there are no true positives, fix the F score at 0 like sklearn.
        # Note: this Python-level tensor comparison requires eager execution.
        if K.sum(K.round(K.clip(y_true, 0, 1))) == 0:
            return 0
        p = cls.precision(y_true, y_pred)
        r = cls.recall(y_true, y_pred)
        bb = beta ** 2
        fbeta_score = (1 + bb) * (p * r) / (bb * p + r + K.epsilon())
        return fbeta_score

    @classmethod
    def fmeasure(cls, y_true, y_pred):
        # F1 score is the F-beta score with beta = 1.
        return cls.fbeta_score(y_true, y_pred, beta=1)
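
A minimal usage sketch, not part of the original file: each method follows the Keras metric signature (y_true, y_pred), so it can be passed directly to model.compile(). The model architecture and input shape below are purely illustrative assumptions.

from keras import models, layers

# Hypothetical binary classifier; any Keras model would work the same way.
model = models.Sequential([
    layers.Dense(16, activation='relu', input_shape=(20,)),
    layers.Dense(1, activation='sigmoid'),
])

# The metrics are reported per batch during training and evaluation.
model.compile(
    optimizer='adam',
    loss='binary_crossentropy',
    metrics=[PerformanceMetrics.precision,
             PerformanceMetrics.recall,
             PerformanceMetrics.fmeasure],
)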