diff --git a/src/graphnet/models/components/embedding.py b/src/graphnet/models/components/embedding.py
index da39716a5..ee03a2193 100644
--- a/src/graphnet/models/components/embedding.py
+++ b/src/graphnet/models/components/embedding.py
@@ -11,22 +11,22 @@ class SinusoidalPosEmb(torch.nn.Module):
     digitization of the input data
     """

-    def __init__(self, dim: int = 16, m: int = 10000) -> None:
+    def __init__(self, dim: int = 16, n_freq: int = 10000) -> None:
         """Construct `SinusoidalPosEmb`.

         Args:
             dim: Embedding dimension.
-            m: Number of frequencies.
+            n_freq: Number of frequencies.
         """
         super().__init__()
         self.dim = dim
-        self.m = m
+        self.n_freq = n_freq

     def forward(self, x: torch.Tensor) -> torch.Tensor:
         """Apply learnable forward pass to the layer."""
         device = x.device
         half_dim = self.dim // 2
-        emb = torch.log(torch.tensor(self.m, device=device)) / half_dim
+        emb = torch.log(torch.tensor(self.n_freq, device=device)) / half_dim
         emb = torch.exp(torch.arange(half_dim, device=device) * (-emb))
         emb = x[..., None] * emb[None, ...]
         emb = torch.cat((emb.sin(), emb.cos()), dim=-1)
diff --git a/src/graphnet/models/rnn/node_rnn.py b/src/graphnet/models/rnn/node_rnn.py
index 8410e9fec..6e2b2ba28 100644
--- a/src/graphnet/models/rnn/node_rnn.py
+++ b/src/graphnet/models/rnn/node_rnn.py
@@ -19,7 +19,7 @@ class Node_RNN(GNN):

     The model takes as input the typical DOM data format and transforms it into
     a time series of DOM activations pr. DOM. before applying a RNN layer and
-    outputting the an RNN output for each DOM. This model is in it's current
+    outputting an RNN output for each DOM. This model is in its current
     state not intended to be used as a standalone model. Furthermore, it needs
     to be used with a time-series dataset and a "cutter" (see
     NodeAsDOMTimeSeries), which is not standard in the graphnet framework.
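
For reviewers, a minimal usage sketch of the renamed `n_freq` argument (not part of this patch; it assumes `SinusoidalPosEmb` can be instantiated directly as a plain module, and the input shape is purely illustrative):

```python
import torch

from graphnet.models.components.embedding import SinusoidalPosEmb

# Embed a (batch, sequence) tensor of scalar features into dim-dimensional
# sinusoidal embeddings; `n_freq` is the frequency base formerly called `m`.
emb = SinusoidalPosEmb(dim=16, n_freq=10000)

x = torch.rand(32, 128)   # e.g. per-DOM scalar feature values (illustrative)
out = emb(x)
print(out.shape)          # torch.Size([32, 128, 16]): sin/cos halves concatenated
```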