Skip to content

Commit

Permalink
add sinusoidal positional embedding
Browse files Browse the repository at this point in the history
  • Loading branch information
Aske-Rosted committed Jan 23, 2024
1 parent dd504bd commit e30b52a
Showing 1 changed file with 27 additions and 0 deletions.
27 changes: 27 additions & 0 deletions src/graphnet/models/components/embedding.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
"""Classes for performing embedding of input data."""
import torch


class SinusoidalPosEmb(torch.nn.Module):
    """Sinusoidal positional embedding layer.

    Maps each scalar input to a `dim`-dimensional vector of sines and
    cosines at geometrically spaced frequencies, following the
    positional encoding of "Attention Is All You Need"
    (Vaswani et al., 2017).
    """

    def __init__(self, dim: int = 16, M: int = 10000) -> None:
        """Construct `SinusoidalPosEmb`.

        Args:
            dim: Embedding dimension. Must be even, since the output
                concatenates `dim // 2` sine and `dim // 2` cosine
                components.
            M: Base period of the embedding; the frequencies are spaced
                geometrically between 1 and 1 / M (10000 in the original
                Transformer paper).

        Raises:
            ValueError: If `dim` is odd.
        """
        super().__init__()
        if dim % 2 != 0:
            raise ValueError(f"dim must be even, got {dim}.")
        self.dim = dim
        self.M = M

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Embed `x` sinusoidally.

        Args:
            x: Tensor of positions of any shape `(...)`.

        Returns:
            Tensor of shape `(..., dim)`, with the sine components in
            the first half of the last axis and the cosine components
            in the second half.
        """
        device = x.device
        # Use dim // 2 frequencies so that sin + cos together yield
        # exactly `dim` features.  (Using `self.dim` here would make
        # the output width 2 * dim, violating the documented contract.)
        half_dim = self.dim // 2
        emb = torch.log(torch.tensor(self.M, device=device)) / half_dim
        emb = torch.exp(torch.arange(half_dim, device=device) * (-emb))
        # Outer product of positions and frequencies via broadcasting.
        emb = x[..., None] * emb[None, ...]
        emb = torch.cat((emb.sin(), emb.cos()), dim=-1)
        return emb

0 comments on commit e30b52a

Please sign in to comment.