make sure final sine layer uses passed in w0
lucidrains committed Jun 20, 2020
1 parent 4f564fc commit 5d3db8c
Showing 3 changed files with 4 additions and 4 deletions.
setup.py — 1 addition & 1 deletion

@@ -3,7 +3,7 @@
 setup(
   name = 'siren-pytorch',
   packages = find_packages(),
-  version = '0.0.2',
+  version = '0.0.3',
   license='MIT',
   description = 'Implicit Neural Representations with Periodic Activation Functions',
   author = 'Phil Wang',
siren_pytorch/__init__.py — 1 addition & 1 deletion

@@ -1 +1 @@
-from siren_pytorch.siren_pytorch import Siren, SirenNet
+from siren_pytorch.siren_pytorch import Siren, SirenNet, Sine
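
This change also exports Sine at the package root, so the periodic activation can be used standalone. A minimal sketch, assuming Sine takes w0 in its constructor and applies sin(w0 * x) elementwise, as in the SIREN paper:

import torch
from siren_pytorch import Sine

act = Sine(1.)            # assumed: computes sin(w0 * x), here with w0 = 1
x = torch.randn(4, 16)
y = act(x)                # elementwise, same shape as x, values in [-1, 1]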
siren_pytorch/siren_pytorch.py — 2 additions & 2 deletions

@@ -45,7 +45,7 @@ def forward(self, x):
 # siren network

 class SirenNet(nn.Module):
-    def __init__(self, dim_in, dim_hidden, dim_out, num_layers, w0 = 1., w0_initial = 30., c = 6., use_bias = True, final_activation = None):
+    def __init__(self, dim_in, dim_hidden, dim_out, num_layers, w0 = 1., w0_initial = 30., use_bias = True, final_activation = None):
         super().__init__()
         layers = []
         for ind in range(num_layers):
@@ -60,7 +60,7 @@ def __init__(self, dim_in, dim_hidden, dim_out, num_layers, w0 = 1., w0_initial
             ))

         self.net = nn.Sequential(*layers)
-        self.last_layer = Siren(dim_in = dim_hidden, dim_out = dim_out, use_bias = use_bias, activation = final_activation)
+        self.last_layer = Siren(dim_in = dim_hidden, dim_out = dim_out, w0 = w0, use_bias = use_bias, activation = final_activation)

     def forward(self, x):
         x = self.net(x)
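
With this fix, the final Siren layer is built with the w0 passed into SirenNet (previously it fell back to Siren's own default), and the c parameter is dropped from the signature. A minimal usage sketch against the new signature; the coordinate and output dimensions and the batch size below are illustrative assumptions, not part of the commit:

import torch
from siren_pytorch import SirenNet

net = SirenNet(
    dim_in = 2,           # e.g. (x, y) pixel coordinates
    dim_hidden = 256,
    dim_out = 3,          # e.g. RGB values
    num_layers = 5,
    w0 = 1.,              # after this commit, also applied to the final layer
    w0_initial = 30.      # higher frequency for the first layer, per the SIREN paper
)

coords = torch.rand(1024, 2)
out = net(coords)         # assumed output shape: (1024, 3)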
