Commit 4ca2734
support channels other than rgb
lucidrains committed Dec 27, 2021
1 parent 9d7aa45 commit 4ca2734
Showing 2 changed files with 3 additions and 2 deletions.
setup.py (2 changes: 1 addition & 1 deletion)

@@ -3,7 +3,7 @@
 setup(
   name = 'transformer-in-transformer',
   packages = find_packages(),
-  version = '0.1.1',
+  version = '0.1.2',
   license='MIT',
   description = 'Transformer in Transformer - Pytorch',
   author = 'Phil Wang',
transformer_in_transformer/tnt.py (3 changes: 2 additions & 1 deletion)

@@ -89,6 +89,7 @@ def __init__(
         pixel_size,
         depth,
         num_classes,
+        channels = 3,
         heads = 8,
         dim_head = 64,
         ff_dropout = 0.,
@@ -116,7 +117,7 @@ def __init__(
             Rearrange('b c (h p1) (w p2) -> (b h w) c p1 p2', p1 = patch_size, p2 = patch_size),
             nn.Unfold(kernel_size = kernel_size, stride = stride, padding = padding),
             Rearrange('... c n -> ... n c'),
-            nn.Linear(3 * kernel_size ** 2, pixel_dim)
+            nn.Linear(channels * kernel_size ** 2, pixel_dim)
         )

         self.patch_pos_emb = nn.Parameter(torch.randn(num_patch_tokens + 1, patch_dim))
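
With this change the pixel-token projection is sized from the new channels argument instead of a hard-coded 3, so non-RGB inputs (for example single-channel grayscale) can be passed directly; channels defaults to 3, so existing RGB usage is unchanged. Below is a minimal usage sketch: the constructor arguments not visible in this diff (image_size, patch_dim, pixel_dim, patch_size and their values) are assumptions based on the repository's README and may need adjusting.

    import torch
    from transformer_in_transformer import TNT

    # Sketch only: arguments not shown in this diff (image_size, patch_dim,
    # pixel_dim, patch_size) are assumed from the repository's README.
    tnt = TNT(
        image_size = 256,     # input height/width
        patch_dim = 512,      # patch token dimension
        pixel_dim = 24,       # pixel token dimension
        patch_size = 16,      # patch size
        pixel_size = 4,       # pixel size
        depth = 6,            # number of TNT blocks
        num_classes = 1000,   # classifier output size
        channels = 1          # new in 0.1.2; defaults to 3 (RGB)
    )

    grayscale = torch.randn(1, 1, 256, 256)  # (batch, channels, height, width)
    logits = tnt(grayscale)                  # (1, 1000)

Passing channels = 1 shrinks the nn.Linear in the diff above to kernel_size ** 2 input features per pixel token.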
