From 4d4a733784855041e2b9e3427d38b9d488d5eb96 Mon Sep 17 00:00:00 2001
From: Haresh Karnan
Date: Wed, 6 Jul 2022 21:29:08 -0500
Subject: [PATCH] changed Tanh layer to Sigmoid

---
 models/beta_vae.py     | 2 +-
 models/betatc_vae.py   | 2 +-
 models/cat_vae.py      | 2 +-
 models/cvae.py         | 2 +-
 models/dfcvae.py       | 2 +-
 models/dip_vae.py      | 2 +-
 models/fvae.py         | 2 +-
 models/hvae.py         | 2 +-
 models/info_vae.py     | 2 +-
 models/iwae.py         | 2 +-
 models/joint_vae.py    | 2 +-
 models/logcosh_vae.py  | 2 +-
 models/lvae.py         | 2 +-
 models/miwae.py        | 2 +-
 models/mssim_vae.py    | 2 +-
 models/swae.py         | 2 +-
 models/twostage_vae.py | 2 +-
 models/vampvae.py      | 4 ++--
 models/vanilla_vae.py  | 2 +-
 models/vq_vae.py       | 2 +-
 models/wae_mmd.py      | 2 +-
 21 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/models/beta_vae.py b/models/beta_vae.py
index 7d709a1b..06e96b7d 100644
--- a/models/beta_vae.py
+++ b/models/beta_vae.py
@@ -83,7 +83,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/betatc_vae.py b/models/betatc_vae.py
index e40d0fa9..d609b46c 100644
--- a/models/betatc_vae.py
+++ b/models/betatc_vae.py
@@ -79,7 +79,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/cat_vae.py b/models/cat_vae.py
index 84212980..e64e6404 100644
--- a/models/cat_vae.py
+++ b/models/cat_vae.py
@@ -83,7 +83,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())
         self.sampling_dist = torch.distributions.OneHotCategorical(1. / categorical_dim * torch.ones((self.categorical_dim, 1)))

     def encode(self, input: Tensor) -> List[Tensor]:
diff --git a/models/cvae.py b/models/cvae.py
index 7886ae96..6f1b7ed6 100644
--- a/models/cvae.py
+++ b/models/cvae.py
@@ -78,7 +78,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/dfcvae.py b/models/dfcvae.py
index 18da6ab9..3a2838e9 100644
--- a/models/dfcvae.py
+++ b/models/dfcvae.py
@@ -76,7 +76,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

         self.feature_network = vgg19_bn(pretrained=True)

diff --git a/models/dip_vae.py b/models/dip_vae.py
index e88cf067..d230a756 100644
--- a/models/dip_vae.py
+++ b/models/dip_vae.py
@@ -73,7 +73,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/fvae.py b/models/fvae.py
index f4c97034..186f691e 100644
--- a/models/fvae.py
+++ b/models/fvae.py
@@ -73,7 +73,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

         # Discriminator network for the Total Correlation (TC) loss
         self.discriminator = nn.Sequential(nn.Linear(self.latent_dim, 1000),
diff --git a/models/hvae.py b/models/hvae.py
index fdd46c68..75db96bc 100644
--- a/models/hvae.py
+++ b/models/hvae.py
@@ -101,7 +101,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

         # ========================================================================#
         # Pesudo Input for the Vamp-Prior
diff --git a/models/info_vae.py b/models/info_vae.py
index 8e735665..dee009ee 100644
--- a/models/info_vae.py
+++ b/models/info_vae.py
@@ -83,7 +83,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/iwae.py b/models/iwae.py
index 2b32356f..f608c971 100644
--- a/models/iwae.py
+++ b/models/iwae.py
@@ -73,7 +73,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/joint_vae.py b/models/joint_vae.py
index 4192669a..b720b4e3 100644
--- a/models/joint_vae.py
+++ b/models/joint_vae.py
@@ -105,7 +105,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())
         self.sampling_dist = torch.distributions.OneHotCategorical(1. / categorical_dim * torch.ones((self.categorical_dim, 1)))

     def encode(self, input: Tensor) -> List[Tensor]:
diff --git a/models/logcosh_vae.py b/models/logcosh_vae.py
index f06b857f..d756044e 100644
--- a/models/logcosh_vae.py
+++ b/models/logcosh_vae.py
@@ -73,7 +73,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/lvae.py b/models/lvae.py
index 4d5ca3eb..d9681696 100644
--- a/models/lvae.py
+++ b/models/lvae.py
@@ -128,7 +128,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())
         hidden_dims.reverse()

     def encode(self, input: Tensor) -> List[Tensor]:
diff --git a/models/miwae.py b/models/miwae.py
index 146c7586..0e44e8bd 100644
--- a/models/miwae.py
+++ b/models/miwae.py
@@ -76,7 +76,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/mssim_vae.py b/models/mssim_vae.py
index 2933756f..8eb4d573 100644
--- a/models/mssim_vae.py
+++ b/models/mssim_vae.py
@@ -75,7 +75,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

         self.mssim_loss = MSSIM(self.in_channels,
                                 window_size,
diff --git a/models/swae.py b/models/swae.py
index 9df3ba96..0ad744e7 100644
--- a/models/swae.py
+++ b/models/swae.py
@@ -79,7 +79,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> Tensor:
         """
diff --git a/models/twostage_vae.py b/models/twostage_vae.py
index 7c7981e0..7d67d287 100644
--- a/models/twostage_vae.py
+++ b/models/twostage_vae.py
@@ -70,7 +70,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

         #---------------------- Second VAE ---------------------------#
         encoder2 = []
diff --git a/models/vampvae.py b/models/vampvae.py
index 2cf99126..b5e2b424 100644
--- a/models/vampvae.py
+++ b/models/vampvae.py
@@ -73,11 +73,11 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

         self.pseudo_input = torch.eye(self.num_components, requires_grad= False)
         self.embed_pseudo = nn.Sequential(nn.Linear(self.num_components, 12288),
-                                          nn.Hardtanh(0.0, 1.0)) # 3x64x64 = 12288
+                                          nn.Hardsigmoid()) # 3x64x64 = 12288

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/vanilla_vae.py b/models/vanilla_vae.py
index 768d25b5..ff07bb1f 100644
--- a/models/vanilla_vae.py
+++ b/models/vanilla_vae.py
@@ -72,7 +72,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> List[Tensor]:
         """
diff --git a/models/vq_vae.py b/models/vq_vae.py
index bd2249c6..b55d1c39 100644
--- a/models/vq_vae.py
+++ b/models/vq_vae.py
@@ -161,7 +161,7 @@ def __init__(self,
                                    out_channels=3,
                                    kernel_size=4,
                                    stride=2, padding=1),
-                nn.Tanh()))
+                nn.Sigmoid()))

         self.decoder = nn.Sequential(*modules)

diff --git a/models/wae_mmd.py b/models/wae_mmd.py
index f44c509a..54928c4d 100644
--- a/models/wae_mmd.py
+++ b/models/wae_mmd.py
@@ -76,7 +76,7 @@ def __init__(self,
                             nn.LeakyReLU(),
                             nn.Conv2d(hidden_dims[-1], out_channels= 3,
                                       kernel_size= 3, padding= 1),
-                            nn.Tanh())
+                            nn.Sigmoid())

     def encode(self, input: Tensor) -> Tensor:
         """
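
For reference, the functional effect of the change: nn.Tanh() squashes decoder reconstructions into [-1, 1], while nn.Sigmoid() maps them into [0, 1], which matches image tensors normalized to [0, 1] (and losses such as BCE that expect that range). Below is a minimal, self-contained sketch of the new decoder head; the feature-map width and the 64x64 spatial size are illustrative assumptions, not values taken from the repository.

    # Illustrative sketch only (not part of the patch); hidden_dim and the
    # 64x64 spatial size are assumed for this example.
    import torch
    import torch.nn as nn

    hidden_dim = 32  # assumed width of the last decoder feature map
    head = nn.Sequential(
        nn.Conv2d(hidden_dim, out_channels=3, kernel_size=3, padding=1),
        nn.Sigmoid())  # reconstructions now land in [0, 1]

    feats = torch.randn(4, hidden_dim, 64, 64)  # dummy decoder feature maps
    recon = head(feats)
    print(recon.min().item() >= 0.0, recon.max().item() <= 1.0)  # True True

With the previous nn.Tanh() head, the same reconstructions would span [-1, 1] and would only match targets shifted into that range.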