Skip to content

Commit

Permalink
add se_adapter and locon
Browse files — browse the repository at this point in the history
  • Loading branch information
hy395 committed Jul 3, 2024
1 parent cdf3634 commit c5b6724
Show file tree
Hide file tree
Showing 8 changed files with 538 additions and 533 deletions.
75 changes: 0 additions & 75 deletions src/baskerville/HY_helper.py

This file was deleted.

43 changes: 1 addition & 42 deletions src/baskerville/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,8 +149,6 @@ def conv_dna(
conv_type="standard",
kernel_initializer="he_normal",
padding="same",
transfer_se=False,
se_ratio=16,
):
"""Construct a single convolution block, assumed to be operating on DNA.
Expand Down Expand Up @@ -197,19 +195,7 @@ def conv_dna(
kernel_initializer=kernel_initializer,
kernel_regularizer=tf.keras.regularizers.l2(l2_scale),
)(current)

# squeeze-excite for transfer
if transfer_se:
se_out = squeeze_excite(current,
activation=None,
additive=False,
bottleneck_ratio=se_ratio,
use_bias=False,
kernel_initializer=tf.keras.initializers.TruncatedNormal(stddev=1e-3),
scale_fun='tanh'
)
current = current + se_out


# squeeze-excite
if se:
current = squeeze_excite(current)
Expand Down Expand Up @@ -281,8 +267,6 @@ def conv_nac(
kernel_initializer="he_normal",
padding="same",
se=False,
transfer_se=False,
se_ratio=16,
):
"""Construct a single convolution block.
Expand Down Expand Up @@ -342,18 +326,6 @@ def conv_nac(
kernel_regularizer=tf.keras.regularizers.l2(l2_scale),
)(current)

# squeeze-excite for transfer
if transfer_se:
se_out = squeeze_excite(current,
activation=None,
additive=False,
bottleneck_ratio=se_ratio,
use_bias=False,
kernel_initializer=tf.keras.initializers.TruncatedNormal(stddev=1e-3),
scale_fun='tanh'
)
current = current + se_out

# squeeze-excite
if se:
current = squeeze_excite(current)
Expand Down Expand Up @@ -484,8 +456,6 @@ def unet_conv(
bn_momentum=0.99,
kernel_size=1,
kernel_initializer="he_normal",
transfer_se=False,
se_ratio=16,
upsample_conv=False,
):
"""Construct a feature pyramid network block.
Expand Down Expand Up @@ -561,17 +531,6 @@ def unet_conv(
kernel_initializer=kernel_initializer,
)(current)

if transfer_se:
se_out = squeeze_excite(current,
activation=None,
additive=False,
bottleneck_ratio=se_ratio,
use_bias=False,
kernel_initializer=tf.keras.initializers.TruncatedNormal(stddev=1e-3),
scale_fun='tanh'
)
current = current + se_out

# dropout
if dropout > 0:
current = tf.keras.layers.Dropout(dropout)(current)
Expand Down
Loading

0 comments on commit c5b6724

Please sign in to comment.