Skip to content

Commit

Permalink
use kaiming initialization for conv layers
Browse files Browse the repository at this point in the history
  • Loading branch information
pattonw committed Feb 14, 2024
1 parent 39915ac commit c140922
Showing 1 changed file with 7 additions and 0 deletions.
7 changes: 7 additions & 0 deletions dacapo/experiments/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,13 @@ def __init__(
)
self.eval_activation = eval_activation

# UPDATE WEIGHT INITIALIZATION TO USE KAIMING
# TODO: put this somewhere better, there might be
# conv layers that aren't followed by ReLUs?
for _name, layer in self.named_modules():
if isinstance(layer, torch.nn.modules.conv._ConvNd):
torch.nn.init.kaiming_normal_(layer.weight, nonlinearity="relu")

def forward(self, x):
result = self.chain(x)
if not self.training and self.eval_activation is not None:
Expand Down

0 comments on commit c140922

Please sign in to comment.