
Commit

mlp_bias Default = True
Aries-chen committed Feb 16, 2022
1 parent fa8da5b commit 1eb58de
Showing 1 changed file with 12 additions and 10 deletions.
22 changes: 12 additions & 10 deletions flowvision/layers/attention/se.py
@@ -1,6 +1,8 @@
+from typing import Optional
+
 import oneflow as flow
 import oneflow.nn as nn
 
+from oneflow.nn import ReLU, Sigmoid
 
 class SEModule(nn.Module):
     """
@@ -11,19 +13,19 @@ class SEModule(nn.Module):
         channels (int): The input channel size
         reduction (int): Ratio that allows us to vary the capacity and computational cost of the SE Module. Default: 16
         rd_channels (int or None): Number of reduced channels. If none, uses reduction to calculate
-        act_layer (flow.nn.Module): An activation layer used after the first FC layer. Default: flow.nn.ReLU
-        gate_layer (flow.nn.Module): An activation layer used after the second FC layer. Default: flow.nn.Sigmoid
-        mlp_bias (bool): If True, add learnable bias to the linear layers. Default: False
+        act_layer (Optional[ReLU]): An activation layer used after the first FC layer. Default: flow.nn.ReLU
+        gate_layer (Optional[Sigmoid]): An activation layer used after the second FC layer. Default: flow.nn.Sigmoid
+        mlp_bias (bool): If True, add learnable bias to the linear layers. Default: True
     """
 
     def __init__(
         self,
-        channels,
-        reduction=16,
-        rd_channels=None,
-        act_layer=nn.ReLU,
-        gate_layer=nn.Sigmoid,
-        mlp_bias=False,
+        channels: int,
+        reduction: int = 16,
+        rd_channels: int = None,
+        act_layer: Optional[ReLU] = nn.ReLU,
+        gate_layer: Optional[Sigmoid] = nn.Sigmoid,
+        mlp_bias=True,
     ):
         super(SEModule, self).__init__()
         rd_channels = channels // reduction if rd_channels is None else rd_channels
(Remainder of the file unchanged and not shown in this diff.)
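For context, a minimal usage sketch under the new default. The rest of SEModule is not shown in this diff, so the sketch assumes its forward pass follows the usual squeeze-and-excitation pattern (global average pool, two FC/1x1-conv layers, sigmoid gate, channel-wise rescale); the input shape and channel count below are illustrative only.

import oneflow as flow
from flowvision.layers.attention.se import SEModule

# With mlp_bias now defaulting to True, both internal FC layers carry a
# learnable bias; pass mlp_bias=False to recover the previous behavior.
se = SEModule(channels=64, reduction=16)  # rd_channels = 64 // 16 = 4

x = flow.randn(2, 64, 32, 32)  # illustrative NCHW feature map
y = se(x)                      # assuming a standard SE forward, y has the same shape as x
print(y.shape)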

0 comments on commit 1eb58de
