Commit

add/remove new/old losses
charlie-becker committed Feb 29, 2024
1 parent 68546fa commit 5f55c97
Showing 1 changed file with 6 additions and 108 deletions.
114 changes: 6 additions & 108 deletions mlguess/keras/losses.py
@@ -1,5 +1,4 @@
import numpy as np
import logging
import keras
import keras.ops as ops

@@ -13,70 +12,8 @@
from torch.special import digamma
from torch import lgamma


class DirichletEvidentialLoss(keras.losses.Loss):
    """
    Loss function for an evidential categorical model.
    Args:
        callback (keras.callbacks.Callback): Callback exposing the annealing coefficient `annealing_coef`.
        name (str): Reference name for the loss.
        this_epoch_num (int): Current epoch number, used to anneal the KL term.
        class_weights (list): List of class weights (experimental).
    """
    def __init__(self, callback=None, name="dirichlet", this_epoch_num=None, class_weights=None):
        super().__init__()
        self.callback = callback
        self.__name__ = name
        self.class_weights = class_weights
        self.this_epoch_num = this_epoch_num
        if self.class_weights:
            logging.warning("The application of class weights to this loss is experimental.")

    def kl(self, alpha):
        beta = ops.ones((1, alpha.shape[1]), dtype="float32")
        S_alpha = ops.sum(alpha, axis=1, keepdims=True)
        S_beta = ops.sum(beta, axis=1, keepdims=True)
        lnB = lgamma(S_alpha) - ops.sum(
            lgamma(alpha), axis=1, keepdims=True
        )
        lnB_uni = ops.sum(
            lgamma(beta), axis=1, keepdims=True
        ) - lgamma(S_beta)

        dg0 = digamma(S_alpha)
        dg1 = digamma(alpha)

        if self.class_weights:
            kl = (ops.sum(self.class_weights * (alpha - beta) * (dg1 - dg0), axis=1, keepdims=True) + lnB +
                  lnB_uni)
        else:
            kl = (ops.sum((alpha - beta) * (dg1 - dg0), axis=1, keepdims=True) + lnB + lnB_uni)
        return kl

    def __call__(self, y, output, sample_weight=None):
        evidence = ops.relu(output)
        alpha = evidence + 1

        S = ops.sum(alpha, axis=1, keepdims=True)
        m = alpha / S

        if self.class_weights:
            A = ops.sum(self.class_weights * (y - m) ** 2, axis=1, keepdims=True)
            B = ops.sum(self.class_weights * alpha * (S - alpha) / (S * S * (S + 1)), axis=1, keepdims=True)
        else:
            A = ops.sum((y - m) ** 2, axis=1, keepdims=True)
            B = ops.sum(alpha * (S - alpha) / (S * S * (S + 1)), axis=1, keepdims=True)

        annealing_coef = ops.minimum(1.0, self.this_epoch_num / self.callback.annealing_coef)
        alpha_hat = y + (1 - y) * alpha
        C = annealing_coef * self.kl(alpha_hat)
        C = ops.mean(C, axis=1)

        return ops.mean(A + B + C)
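
# Illustrative usage sketch -- not part of this commit. `_AnnealSchedule` is a
# hypothetical stand-in for the project's annealing callback; all the loss
# requires of it here is an `annealing_coef` attribute. Because the loss calls
# torch.special.digamma/lgamma, this assumes the torch backend.
class _AnnealSchedule:
    annealing_coef = 10.0  # epochs over which the KL term ramps up to full weight

loss_fn = DirichletEvidentialLoss(callback=_AnnealSchedule(), this_epoch_num=3)
y = ops.one_hot(ops.array([0, 2]), 3)                    # one-hot targets, 3 classes
logits = ops.array([[2.0, 0.1, 0.3], [0.2, 0.1, 1.5]])   # raw network outputs
print(loss_fn(y, logits))                                # scalar evidential loss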

@keras.saving.register_keras_serializable()
def EvidentialCatLoss(evi_coef, current_epoch, class_weights=None):
def evidential_cat_loss(evi_coef, epoch_callback, class_weights=None):

    def calc_kl(alpha):
        beta = ops.ones(shape=(1, alpha.shape[1]), dtype="float32")
@@ -95,6 +32,7 @@ def calc_kl(alpha):

    @keras.saving.register_keras_serializable()
    def loss(y, y_pred):
        current_epoch = epoch_callback.epoch_var
        evidence = ops.relu(y_pred)
        alpha = evidence + 1
        s = ops.sum(alpha, axis=1, keepdims=True)
@@ -116,52 +54,9 @@ def loss(y, y_pred):

    return loss
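
# Illustrative wiring sketch -- not part of this commit. The factory closes over
# a callback-like object whose `epoch_var` attribute tracks the current epoch;
# `EpochTracker` below is a hypothetical minimal version (the package's real
# callback may store `epoch_var` as a backend variable so compiled graphs see
# the per-epoch updates).
class EpochTracker(keras.callbacks.Callback):
    def __init__(self):
        super().__init__()
        self.epoch_var = 0  # read inside the loss closure on every call

    def on_epoch_begin(self, epoch, logs=None):
        self.epoch_var = epoch

tracker = EpochTracker()
model = keras.Sequential([keras.layers.Dense(3)])
model.compile(optimizer="adam",
              loss=evidential_cat_loss(evi_coef=10.0, epoch_callback=tracker))
# model.fit(x, y_onehot, callbacks=[tracker])  # y_onehot: one-hot labels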

class EvidentialRegressionLoss(keras.losses.Loss):
    """
    Loss function for an evidential regression model. The total loss is the negative log-likelihood of the
    Normal-Inverse-Gamma distribution plus an evidence-weighted error term scaled by the evidential
    coefficient. The coefficient strongly influences the model's uncertainty estimates (less so the
    predictions themselves) and must be tuned per dataset.
    Loss = loss_nll + coeff * loss_reg
    Args:
        coeff (float): Evidential coefficient
    """
    def __init__(self, coeff=1.0):
        super(EvidentialRegressionLoss, self).__init__()
        self.coeff = coeff

    def nig_nll(self, y, gamma, v, alpha, beta, reduce=True):
        v = ops.maximum(v, keras.backend.epsilon())
        twoBlambda = 2 * beta * (1 + v)
        nll = (0.5 * ops.log(np.pi / v)
               - alpha * ops.log(twoBlambda)
               + (alpha + 0.5) * ops.log(v * (y - gamma) ** 2 + twoBlambda)
               + lgamma(alpha)
               - lgamma(alpha + 0.5))

        return ops.mean(nll) if reduce else nll

    def nig_reg(self, y, gamma, v, alpha, reduce=True):
        error = ops.abs(y - gamma)
        evi = 2 * v + alpha
        reg = error * evi

        return ops.mean(reg) if reduce else reg

    def call(self, y_true, evidential_output):
        gamma, v, alpha, beta = ops.split(evidential_output, 4, axis=-1)
        loss_nll = self.nig_nll(y_true, gamma, v, alpha, beta)
        loss_reg = self.nig_reg(y_true, gamma, v, alpha)

        return loss_nll + self.coeff * loss_reg

    def get_config(self):
        config = super(EvidentialRegressionLoss, self).get_config()
        config.update({"coeff": self.coeff})
        return config
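
# Illustrative sketch -- not part of this commit. The evidential head emits four
# parameters per target (gamma, v, alpha, beta) concatenated on the last axis;
# call() splits them and combines the NIG NLL with the scaled regularizer.
y_true = ops.array([[0.5]])
nig_params = ops.array([[0.4, 1.0, 2.0, 1.0]])  # gamma, v, alpha, beta
loss_fn = EvidentialRegressionLoss(coeff=0.5)
print(loss_fn(y_true, nig_params))              # loss_nll + 0.5 * loss_reg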

@keras.saving.register_keras_serializable()
def EvidentialRegLoss(evi_coef):
def evidential_reg_loss(evi_coef):
"""
Loss function for an evidential regression model. The total loss is the Negative Log Likelihood of the
Normal Inverse Gamma summed with the error and scaled by the evidential coefficient. The coefficient has a strong
@@ -261,3 +156,6 @@ def get_config(self):
        config = super(EvidentialRegressionCoupledLoss, self).get_config()
        config.update({"r": self.r, "coeff": self.coeff})
        return config
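
# Serialization sketch -- not part of this commit, and the constructor arguments
# below are an assumption inferred from get_config(). Because `r` and `coeff`
# round-trip through the config and the class is registered as serializable,
# models compiled with this loss can be saved and reloaded.
loss_fn = EvidentialRegressionCoupledLoss(coeff=1.0, r=1.0)
print(loss_fn.get_config())  # includes {"r": 1.0, "coeff": 1.0} plus base keys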


