Commit 6efa538
codeclimate fixes
mobra7 committed Oct 28, 2024
1 parent a587e18 commit 6efa538
Showing 3 changed files with 35 additions and 20 deletions.

examples/04_training/06_train_icemix_model.py (2 changes: 1 addition & 1 deletion)
@@ -149,7 +149,7 @@ def main(
"add_norm_layer": True,
"skip_readout": True,
},
fourier_mapping = [0, 1, 2, 3, None, None],
fourier_mapping=[0, 1, 2, 3, None, None],
)
task = DirectionReconstructionWithKappa(
hidden_size=backbone.nb_outputs,
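
For context, the new mapping in this example declares that input columns 0-3
already hold [x, y, z, time] and that charge and auxiliary are absent. A
minimal sketch of the call (the hidden_dim and seq_length values here are
illustrative, not taken from this example):

from graphnet.models.gnn.icemix import DeepIce

backbone = DeepIce(
    hidden_dim=768,
    seq_length=192,
    # Columns 0-3 of the input are [x, y, z, time]; charge and
    # auxiliary are not in the data, so their slots are None.
    fourier_mapping=[0, 1, 2, 3, None, None],
)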

src/graphnet/models/components/embedding.py (44 changes: 29 additions & 15 deletions)
@@ -56,8 +56,8 @@ class FourierEncoder(LightningModule):
     This module incorporates sinusoidal positional embeddings and auxiliary
     embeddings to process input sequences and produce meaningful
     representations. The features x, y, z and time are mandatory, while charge
-    and auxiliary are optional. Please use the mapping to ensure correct fourier
-    encoding.
+    and auxiliary are optional. Please use the mapping to ensure correct
+    fourier encoding.
     """

     def __init__(
@@ -79,7 +79,8 @@ def __init__(
                 depending on `n_features`.
             output_dim: Dimension of the output (I.e. number of columns).
             scaled: Whether or not to scale the embeddings.
-            mapping: Mapping of the data to [x,y,z,time,charge,auxiliary]. Use None for missing features.
+            mapping: Mapping of the data to [x,y,z,time,charge,auxiliary].
+                Use None for missing features.
         """
         super().__init__()
         self.mapping_str = ["x", "y", "z", "time", "charge", "auxiliary"]
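
The semantics implied by this docstring: position i of the mapping holds the
input column index of feature i in the fixed order [x, y, z, time, charge,
auxiliary], so differently ordered data is handled by permuting the mapping
rather than the tensor. A hypothetical example:

# Input columns ordered [time, x, y, z, charge]: x is column 1, y is 2,
# z is 3, time is 0, charge is 4, and there is no auxiliary flag.
mapping = [1, 2, 3, 0, 4, None]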
@@ -88,11 +89,17 @@ def __init__(
         self.sin_emb = SinusoidalPosEmb(dim=seq_length, scaled=scaled)
         self.sin_emb2 = SinusoidalPosEmb(dim=seq_length // 2, scaled=scaled)

-        assert len(mapping) == 6, "Fourier mapping must have 6 elements. Use None for missing features."
-        assert all([isinstance(i, int) or i is None for i in mapping]), "Use int or None in fourier mapping."
+        assert len(mapping) == 6, (
+            "Fourier mapping must have 6 elements. Use None for missing features."
+        )
+        assert all(
+            isinstance(i, int) or i is None for i in mapping
+        ), "Use int or None in fourier mapping."

         if any([i is None for i in mapping[:4]]):
-            missing = [self.mapping_str[i] for i in range(4) if mapping[i] is None]
+            missing = [
+                self.mapping_str[i] for i in range(4) if mapping[i] is None
+            ]
             raise ValueError(
                 f"x, y, z and time of the DOM are required."
                 f"{missing} missing in mapping."
@@ -113,7 +120,6 @@ def __init__(
             nn.Linear(mlp_dim, output_dim),
         )

-
     def forward(
         self,
         x: Tensor,
@@ -122,24 +128,32 @@ def forward(
"""Forward pass."""
mapping_max = max(i for i in self.mapping if i is not None)+1
if mapping_max > x.shape[2]:
raise IndexError(f"Fourier mapping does not fit given data."
f"Feature space of data is too small (size {x.shape[2]}),"
f"given fourier mapping requires at least {mapping_max}.")
raise IndexError(
f"Fourier mapping does not fit given data."
f"Feature space of data is too small (size {x.shape[2]}),"
f"given fourier mapping requires at least {mapping_max}."
)

length = torch.log10(seq_length.to(dtype=x.dtype))
embeddings = [self.sin_emb(4096 * x[:, :, self.mapping[:3]]).flatten(-2)] # Position

# Position
embeddings = [self.sin_emb(4096 * x[:, :, self.mapping[:3]]).flatten(-2)]

# Charge
if self.n_features >= 5:
embeddings.append(self.sin_emb(1024 * x[:, :, self.mapping[4]])) # Charge
embeddings.append(self.sin_emb(1024 * x[:, :, self.mapping[4]]))

embeddings.append(self.sin_emb(4096 * x[:, :, self.mapping[3]])) # Time
# Time
embeddings.append(self.sin_emb(4096 * x[:, :, self.mapping[3]]))

# Auxiliary
if self.n_features == 6:
embeddings.append(self.aux_emb(x[:, :, self.mapping[5]].long())) # Auxiliary
embeddings.append(self.aux_emb(x[:, :, self.mapping[5]].long()))

# Length
embeddings.append(
self.sin_emb2(length).unsqueeze(1).expand(-1, max(seq_length), -1)
) # Length
)

x = torch.cat(embeddings, -1)
x = self.mlp(x)
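
The sin_emb calls above apply standard sinusoidal positional embeddings to
scaled feature values (positions scaled by 4096, charge by 1024). A rough
standalone illustration of the technique (not graphnet's exact
SinusoidalPosEmb; the real frequency layout and `scaled` option may differ):

import math

import torch
from torch import Tensor, nn


class SinusoidalPosEmbSketch(nn.Module):
    """Toy sinusoidal embedding: maps each scalar to dim sin/cos values."""

    def __init__(self, dim: int = 16, max_period: int = 10000) -> None:
        super().__init__()
        assert dim % 2 == 0, "dim must be even (half sin, half cos)"
        self.dim = dim
        self.max_period = max_period

    def forward(self, x: Tensor) -> Tensor:
        half_dim = self.dim // 2
        # Geometrically spaced frequencies from 1 down to ~1/max_period.
        freqs = torch.exp(
            -math.log(self.max_period)
            * torch.arange(half_dim, device=x.device)
            / half_dim
        )
        angles = x.unsqueeze(-1) * freqs  # broadcast over a new last dim
        return torch.cat((angles.sin(), angles.cos()), dim=-1)


emb = SinusoidalPosEmbSketch(dim=32)
out = emb(4096 * torch.rand(2, 128, 3))  # shape (2, 128, 3, 32); the
# encoder above then merges the last two dims, cf. .flatten(-2).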

src/graphnet/models/gnn/icemix.py (9 changes: 5 additions & 4 deletions)
@@ -43,7 +43,7 @@ def __init__(
         scaled_emb: bool = False,
         include_dynedge: bool = False,
         dynedge_args: Dict[str, Any] = None,
-        fourier_mapping: list = [0,1,2,3,4,5]
+        fourier_mapping: list = [0, 1, 2, 3, 4, 5]
     ):
         """Construct `DeepIce`.
@@ -62,8 +62,9 @@ def __init__(
             provided, DynEdge will be initialized with the original Kaggle
             Competition settings. If `include_dynedge` is False, this
             argument have no impact.
-            fourier_mapping: Mapping of the data to [x,y,z,time,charge,auxiliary]
-                for the FourierEncoder. Use None for missing features.
+            fourier_mapping: Mapping of the data to [x,y,z,time,charge,
+                auxiliary] for the FourierEncoder. Use None for missing
+                features.
         """
         super().__init__(seq_length, hidden_dim)
         fourier_out_dim = hidden_dim // 2 if include_dynedge else hidden_dim
@@ -72,7 +73,7 @@ def __init__(
             mlp_dim=None,
             output_dim=fourier_out_dim,
             scaled=scaled_emb,
-            mapping = fourier_mapping,
+            mapping=fourier_mapping,
         )
         self.rel_pos = SpacetimeEncoder(head_size)
         self.sandwich = nn.ModuleList(
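
One thing this formatting pass leaves untouched: fourier_mapping defaults to
a mutable list ([0, 1, 2, 3, 4, 5]), a pattern linters often flag because
the default object is shared across calls. A common defensive idiom, sketched
as a general Python pattern rather than a change the project has made:

from typing import List, Optional


def resolve_fourier_mapping(
    mapping: Optional[List[Optional[int]]] = None,
) -> List[Optional[int]]:
    # Build a fresh list per call instead of sharing one default object.
    return [0, 1, 2, 3, 4, 5] if mapping is None else mapping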
