prevent inverse sigmoid and softplus from returning +/- inf
Simon Kamuk Christiansen committed Dec 4, 2024
1 parent 4a27c85 commit da1480c
Showing 1 changed file with 8 additions and 2 deletions:

neural_lam/models/base_graph_model.py
@@ -201,11 +201,17 @@ def inverse_softplus(x, beta=1, threshold=20):
     # If x*beta is above threshold, returns linear function
     # for numerical stability
     under_lim = x * beta <= threshold
-    x[under_lim] = torch.log(torch.expm1(x[under_lim] * beta)) / beta
+    x[under_lim] = (
+        torch.log(
+            torch.clamp_min(torch.expm1(x[under_lim] * beta), 1e-6)
+        )
+        / beta
+    )
     return x

 def inverse_sigmoid(x):
-    return torch.log(x / (1 - x))
+    x_clamped = torch.clamp(x, min=1e-6, max=1 - 1e-6)
+    return torch.log(x_clamped / (1 - x_clamped))

 self.inverse_clamp_lower_upper = lambda x: (
     sigmoid_center
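The change is easiest to see in isolation. Below is a minimal standalone sketch of the two patched helpers, assuming plain tensor inputs; the in-repo versions appear to be defined inside a model method (hence the surrounding self.inverse_clamp_lower_upper context), the names and the 1e-6 epsilon follow the diff, and the demo values at the end are purely illustrative:

    import torch

    def inverse_softplus(x, beta=1, threshold=20):
        # Where x * beta exceeds the threshold, softplus(x) ~= x, so those
        # entries are left unchanged for numerical stability.
        under_lim = x * beta <= threshold
        x = x.clone()  # avoid mutating the caller's tensor in this sketch
        # expm1 underflows to 0 as x -> 0+, so clamp before log to avoid -inf.
        x[under_lim] = (
            torch.log(torch.clamp_min(torch.expm1(x[under_lim] * beta), 1e-6))
            / beta
        )
        return x

    def inverse_sigmoid(x):
        # Clamp away from 0 and 1 so log(x / (1 - x)) stays finite.
        x_clamped = torch.clamp(x, min=1e-6, max=1 - 1e-6)
        return torch.log(x_clamped / (1 - x_clamped))

    # Before the fix, boundary inputs produced +/- inf:
    #   torch.log(torch.tensor(0.0) / 1.0)  ->  -inf
    print(inverse_sigmoid(torch.tensor([0.0, 0.5, 1.0])))
    # tensor([-13.8155,   0.0000,  13.8155])  -- finite, not [-inf, 0, inf]
    print(inverse_softplus(torch.tensor([0.0, 1.0, 25.0])))
    # tensor([-13.8155,   0.5413,  25.0000])

The epsilon trades exactness at the boundaries for finiteness: inputs within 1e-6 of 0 (or, for the sigmoid, of 1) all map to the same large but finite value instead of +/- inf, which keeps downstream gradients and parameter initializations well defined.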
